diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ServerExcludedException.java hbase-client/src/main/java/org/apache/hadoop/hbase/ServerExcludedException.java new file mode 100644 index 0000000..d920650 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/ServerExcludedException.java @@ -0,0 +1,34 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import java.io.IOException; + +/** + * This exception is thrown when a server is added to the exclusion list, or is not present in + * the inclusion list. 
+ */ +public class ServerExcludedException extends IOException { + + private static final long serialVersionUID = 4179015119193771918L; + + public ServerExcludedException(String msg) { + super(msg); + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index 2c9b5a3..fc1ac05 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -1216,4 +1216,30 @@ public interface Admin extends Abortable, Closeable { * @return A MasterCoprocessorRpcChannel instance */ CoprocessorRpcChannel coprocessorService(); + + /** + * Returns all the names of the region servers which were added to the exclusion list. + * @return the host names of the region servers in the exclusion list + * @throws IOException if a remote or network exception occurs + */ + List<String> getExcludedRegionsServers() throws IOException; + + /** + * Returns all the names of the region servers which were added to the inclusion list. + * @return the host names of the region servers in the inclusion list + * @throws IOException if a remote or network exception occurs + */ + List<String> getIncludedRegionServers() throws IOException; + + /** + * @param hostName name of the region server to exclude; can be just hostname, or hostname:port + * @throws IOException if a remote or network exception occurs + */ + void addRegionServerToExclusionList(String hostName) throws IOException; + + /** + * @param hostName name of the region server to include; can be just hostname, or hostname:port + * @throws IOException if a remote or network exception occurs + */ + void addRegionServerToInclusionList(String hostName) throws IOException; } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java index ad18e23..5ffa818 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java @@ -77,6 +77,10 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServic import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; 
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest; @@ -107,6 +111,10 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusR import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; @@ -153,8 +161,8 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse; import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest; -import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse; @@ -2016,6 +2024,33 @@ class ConnectionManager { throws ServiceException { return stub.getClusterStatus(controller, request); } + + @Override + public GetExcludedRegionsServersResponse getExcludedRegionsServers( + RpcController controller, GetExcludedRegionsServersRequest request) + throws ServiceException { + return stub.getExcludedRegionsServers(controller, request); + } + + @Override + public GetIncludedRegionServersResponse getIncludedRegionServers(RpcController controller, + GetIncludedRegionServersRequest request) throws ServiceException { + return stub.getIncludedRegionServers(controller, request); + } + + @Override + public AddRegionServerToExclusionListResponse addRegionServerToExclusionList( + RpcController controller, AddRegionServerToExclusionListRequest request) + throws ServiceException { + return stub.addRegionServerToExclusionList(controller, request); + } + + @Override + public AddRegionServerToInclusionListResponse addRegionServerToInclusionList( + RpcController controller, AddRegionServerToInclusionListRequest request) + throws ServiceException { + return stub.addRegionServerToInclusionList(controller, request); + } }; } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index fe8fb31..3678882 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -91,6 +91,8 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescripti import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest; @@ -105,6 +107,8 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; @@ -3524,4 +3528,54 @@ public synchronized byte[][] rollHLogWriter(String serverName) return true; 
} } + + @Override + public List<String> getExcludedRegionsServers() throws IOException { + return executeCallable(new MasterCallable<List<String>>(getConnection()) { + + @Override + public List<String> call(int callTimeout) throws Exception { + return master.getExcludedRegionsServers(null, + GetExcludedRegionsServersRequest.newBuilder().build()).getExcludedServersList(); + } + }); + } + + @Override + public List<String> getIncludedRegionServers() throws IOException { + return executeCallable(new MasterCallable<List<String>>(getConnection()) { + + @Override + public List<String> call(int callTimeout) throws Exception { + return master.getIncludedRegionServers(null, + GetIncludedRegionServersRequest.newBuilder().build()).getIncludedServersList(); + } + }); + } + + @Override + public void addRegionServerToExclusionList(final String hostName) throws IOException { + executeCallable(new MasterCallable<Void>(getConnection()) { + + @Override + public Void call(int callTimeout) throws Exception { + master.addRegionServerToExclusionList(null, AddRegionServerToExclusionListRequest + .newBuilder().setHostName(hostName).build()); + return null; + } + }); + } + + @Override + public void addRegionServerToInclusionList(final String hostName) throws IOException { + executeCallable(new MasterCallable<Void>(getConnection()) { + + @Override + public Void call(int callTimeout) throws Exception { + master.addRegionServerToInclusionList(null, AddRegionServerToInclusionListRequest + .newBuilder().setHostName(hostName).build()); + return null; + } + }); + } } diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java index ee1ab67..ec302c9 100644 --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java @@ -40626,6 +40626,3440 @@ public final class MasterProtos { // 
@@protoc_insertion_point(class_scope:IsProcedureDoneResponse) } + public interface GetExcludedRegionsServersRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code GetExcludedRegionsServersRequest} + */ + public static final class GetExcludedRegionsServersRequest extends + com.google.protobuf.GeneratedMessage + implements GetExcludedRegionsServersRequestOrBuilder { + // Use GetExcludedRegionsServersRequest.newBuilder() to construct. + private GetExcludedRegionsServersRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetExcludedRegionsServersRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetExcludedRegionsServersRequest defaultInstance; + public static GetExcludedRegionsServersRequest getDefaultInstance() { + return defaultInstance; + } + + public GetExcludedRegionsServersRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetExcludedRegionsServersRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetExcludedRegionsServersRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetExcludedRegionsServersRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + 
if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + 
public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetExcludedRegionsServersRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private 
static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.getDefaultInstance()) return this; + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:GetExcludedRegionsServersRequest) + } + + static { + defaultInstance = new GetExcludedRegionsServersRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetExcludedRegionsServersRequest) + } + + public interface GetExcludedRegionsServersResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string excludedServers = 1; + /** + * repeated string excludedServers = 1; + */ + java.util.List + getExcludedServersList(); + /** + * repeated string excludedServers = 1; + */ + int getExcludedServersCount(); + /** + * repeated string excludedServers = 1; + */ + java.lang.String getExcludedServers(int index); + /** + * repeated string excludedServers = 1; + */ + com.google.protobuf.ByteString + getExcludedServersBytes(int index); + } + /** + * Protobuf type {@code GetExcludedRegionsServersResponse} + */ + public static final class GetExcludedRegionsServersResponse extends + com.google.protobuf.GeneratedMessage + implements GetExcludedRegionsServersResponseOrBuilder { + // Use GetExcludedRegionsServersResponse.newBuilder() to construct. 
+ private GetExcludedRegionsServersResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetExcludedRegionsServersResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetExcludedRegionsServersResponse defaultInstance; + public static GetExcludedRegionsServersResponse getDefaultInstance() { + return defaultInstance; + } + + public GetExcludedRegionsServersResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetExcludedRegionsServersResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + excludedServers_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + excludedServers_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + 
excludedServers_ = new com.google.protobuf.UnmodifiableLazyStringList(excludedServers_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetExcludedRegionsServersResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetExcludedRegionsServersResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated string excludedServers = 1; + public static final int EXCLUDEDSERVERS_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList excludedServers_; + /** + * repeated string excludedServers = 1; + */ + public java.util.List + getExcludedServersList() { + return excludedServers_; + } + /** + * repeated string excludedServers = 1; + */ + public int getExcludedServersCount() { + return excludedServers_.size(); + } + /** + * repeated string excludedServers = 1; + */ + public java.lang.String getExcludedServers(int index) { + return excludedServers_.get(index); + } + /** + * repeated 
string excludedServers = 1; + */ + public com.google.protobuf.ByteString + getExcludedServersBytes(int index) { + return excludedServers_.getByteString(index); + } + + private void initFields() { + excludedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < excludedServers_.size(); i++) { + output.writeBytes(1, excludedServers_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < excludedServers_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(excludedServers_.getByteString(i)); + } + size += dataSize; + size += 1 * getExcludedServersList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse) obj; + + boolean result = 
true; + result = result && getExcludedServersList() + .equals(other.getExcludedServersList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getExcludedServersCount() > 0) { + hash = (37 * hash) + EXCLUDEDSERVERS_FIELD_NUMBER; + hash = (53 * hash) + getExcludedServersList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf 
type {@code GetExcludedRegionsServersResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + excludedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetExcludedRegionsServersResponse_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + excludedServers_ = new com.google.protobuf.UnmodifiableLazyStringList( + excludedServers_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.excludedServers_ = excludedServers_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance()) return this; + if (!other.excludedServers_.isEmpty()) { + if (excludedServers_.isEmpty()) { + excludedServers_ = other.excludedServers_; + bitField0_ = (bitField0_ & 
~0x00000001); + } else { + ensureExcludedServersIsMutable(); + excludedServers_.addAll(other.excludedServers_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated string excludedServers = 1; + private com.google.protobuf.LazyStringList excludedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureExcludedServersIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + excludedServers_ = new com.google.protobuf.LazyStringArrayList(excludedServers_); + bitField0_ |= 0x00000001; + } + } + /** + * repeated string excludedServers = 1; + */ + public java.util.List + getExcludedServersList() { + return java.util.Collections.unmodifiableList(excludedServers_); + } + /** + * repeated string excludedServers = 1; + */ + public int getExcludedServersCount() { + return excludedServers_.size(); + } + /** + * repeated string excludedServers = 1; + */ + public java.lang.String getExcludedServers(int index) { + return excludedServers_.get(index); + } + /** + * repeated string excludedServers = 1; + */ + public com.google.protobuf.ByteString + getExcludedServersBytes(int index) { + return excludedServers_.getByteString(index); + } + /** + * 
repeated string excludedServers = 1; + */ + public Builder setExcludedServers( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExcludedServersIsMutable(); + excludedServers_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string excludedServers = 1; + */ + public Builder addExcludedServers( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExcludedServersIsMutable(); + excludedServers_.add(value); + onChanged(); + return this; + } + /** + * repeated string excludedServers = 1; + */ + public Builder addAllExcludedServers( + java.lang.Iterable values) { + ensureExcludedServersIsMutable(); + super.addAll(values, excludedServers_); + onChanged(); + return this; + } + /** + * repeated string excludedServers = 1; + */ + public Builder clearExcludedServers() { + excludedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * repeated string excludedServers = 1; + */ + public Builder addExcludedServersBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureExcludedServersIsMutable(); + excludedServers_.add(value); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetExcludedRegionsServersResponse) + } + + static { + defaultInstance = new GetExcludedRegionsServersResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetExcludedRegionsServersResponse) + } + + public interface GetIncludedRegionServersRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code GetIncludedRegionServersRequest} + */ + public static final class GetIncludedRegionServersRequest extends + com.google.protobuf.GeneratedMessage + implements GetIncludedRegionServersRequestOrBuilder { + // Use 
GetIncludedRegionServersRequest.newBuilder() to construct. + private GetIncludedRegionServersRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetIncludedRegionServersRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetIncludedRegionServersRequest defaultInstance; + public static GetIncludedRegionServersRequest getDefaultInstance() { + return defaultInstance; + } + + public GetIncludedRegionServersRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetIncludedRegionServersRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetIncludedRegionServersRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetIncludedRegionServersRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return 
super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException 
{ + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetIncludedRegionServersRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:GetIncludedRegionServersRequest) + } + + static { + defaultInstance = new GetIncludedRegionServersRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetIncludedRegionServersRequest) + } + + public interface GetIncludedRegionServersResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string includedServers = 1; + /** + * repeated string includedServers = 1; + */ + java.util.List + getIncludedServersList(); + /** + * repeated string includedServers = 1; + */ + int getIncludedServersCount(); + /** + * repeated string includedServers = 1; + */ + java.lang.String getIncludedServers(int index); + /** + * repeated string includedServers = 1; + */ + com.google.protobuf.ByteString + getIncludedServersBytes(int index); + } + /** + * Protobuf type {@code GetIncludedRegionServersResponse} + */ + public static final class GetIncludedRegionServersResponse extends + com.google.protobuf.GeneratedMessage + implements GetIncludedRegionServersResponseOrBuilder { + // Use GetIncludedRegionServersResponse.newBuilder() to construct. 
+ private GetIncludedRegionServersResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetIncludedRegionServersResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetIncludedRegionServersResponse defaultInstance; + public static GetIncludedRegionServersResponse getDefaultInstance() { + return defaultInstance; + } + + public GetIncludedRegionServersResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetIncludedRegionServersResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + includedServers_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + includedServers_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + 
includedServers_ = new com.google.protobuf.UnmodifiableLazyStringList(includedServers_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetIncludedRegionServersResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetIncludedRegionServersResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated string includedServers = 1; + public static final int INCLUDEDSERVERS_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList includedServers_; + /** + * repeated string includedServers = 1; + */ + public java.util.List + getIncludedServersList() { + return includedServers_; + } + /** + * repeated string includedServers = 1; + */ + public int getIncludedServersCount() { + return includedServers_.size(); + } + /** + * repeated string includedServers = 1; + */ + public java.lang.String getIncludedServers(int index) { + return includedServers_.get(index); + } + /** + * repeated string 
includedServers = 1; + */ + public com.google.protobuf.ByteString + getIncludedServersBytes(int index) { + return includedServers_.getByteString(index); + } + + private void initFields() { + includedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < includedServers_.size(); i++) { + output.writeBytes(1, includedServers_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < includedServers_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(includedServers_.getByteString(i)); + } + size += dataSize; + size += 1 * getIncludedServersList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse) obj; + + boolean result = true; + result 
= result && getIncludedServersList() + .equals(other.getIncludedServersList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getIncludedServersCount() > 0) { + hash = (37 * hash) + INCLUDEDSERVERS_FIELD_NUMBER; + hash = (53 * hash) + getIncludedServersList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code 
GetIncludedRegionServersResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + includedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetIncludedRegionServersResponse_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + includedServers_ = new com.google.protobuf.UnmodifiableLazyStringList( + includedServers_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.includedServers_ = includedServers_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance()) return this; + if (!other.includedServers_.isEmpty()) { + if (includedServers_.isEmpty()) { + includedServers_ = other.includedServers_; + bitField0_ = (bitField0_ & ~0x00000001); + } 
else { + ensureIncludedServersIsMutable(); + includedServers_.addAll(other.includedServers_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated string includedServers = 1; + private com.google.protobuf.LazyStringList includedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureIncludedServersIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + includedServers_ = new com.google.protobuf.LazyStringArrayList(includedServers_); + bitField0_ |= 0x00000001; + } + } + /** + * repeated string includedServers = 1; + */ + public java.util.List + getIncludedServersList() { + return java.util.Collections.unmodifiableList(includedServers_); + } + /** + * repeated string includedServers = 1; + */ + public int getIncludedServersCount() { + return includedServers_.size(); + } + /** + * repeated string includedServers = 1; + */ + public java.lang.String getIncludedServers(int index) { + return includedServers_.get(index); + } + /** + * repeated string includedServers = 1; + */ + public com.google.protobuf.ByteString + getIncludedServersBytes(int index) { + return includedServers_.getByteString(index); + } + /** + * repeated string 
includedServers = 1; + */ + public Builder setIncludedServers( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureIncludedServersIsMutable(); + includedServers_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string includedServers = 1; + */ + public Builder addIncludedServers( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureIncludedServersIsMutable(); + includedServers_.add(value); + onChanged(); + return this; + } + /** + * repeated string includedServers = 1; + */ + public Builder addAllIncludedServers( + java.lang.Iterable values) { + ensureIncludedServersIsMutable(); + super.addAll(values, includedServers_); + onChanged(); + return this; + } + /** + * repeated string includedServers = 1; + */ + public Builder clearIncludedServers() { + includedServers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * repeated string includedServers = 1; + */ + public Builder addIncludedServersBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureIncludedServersIsMutable(); + includedServers_.add(value); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetIncludedRegionServersResponse) + } + + static { + defaultInstance = new GetIncludedRegionServersResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetIncludedRegionServersResponse) + } + + public interface AddRegionServerToExclusionListRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string hostName = 1; + /** + * optional string hostName = 1; + */ + boolean hasHostName(); + /** + * optional string hostName = 1; + */ + java.lang.String getHostName(); + /** + * optional string hostName = 1; + */ + com.google.protobuf.ByteString + 
getHostNameBytes(); + } + /** + * Protobuf type {@code AddRegionServerToExclusionListRequest} + */ + public static final class AddRegionServerToExclusionListRequest extends + com.google.protobuf.GeneratedMessage + implements AddRegionServerToExclusionListRequestOrBuilder { + // Use AddRegionServerToExclusionListRequest.newBuilder() to construct. + private AddRegionServerToExclusionListRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AddRegionServerToExclusionListRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AddRegionServerToExclusionListRequest defaultInstance; + public static AddRegionServerToExclusionListRequest getDefaultInstance() { + return defaultInstance; + } + + public AddRegionServerToExclusionListRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AddRegionServerToExclusionListRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + hostName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + 
} catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AddRegionServerToExclusionListRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddRegionServerToExclusionListRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional string hostName = 1; + public static final int HOSTNAME_FIELD_NUMBER = 1; + private java.lang.Object hostName_; + /** + * optional string hostName = 1; + */ + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string hostName = 1; + */ + public java.lang.String getHostName() { + java.lang.Object ref = hostName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + 
com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostName_ = s; + } + return s; + } + } + /** + * optional string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + hostName_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getHostNameBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getHostNameBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest)) { + return 
super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest) obj; + + boolean result = true; + result = result && (hasHostName() == other.hasHostName()); + if (hasHostName()) { + result = result && getHostName() + .equals(other.getHostName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHostName()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest 
prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code AddRegionServerToExclusionListRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + hostName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return 
create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.hostName_ = hostName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.getDefaultInstance()) return this; + if (other.hasHostName()) { + bitField0_ |= 0x00000001; + hostName_ = other.hostName_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional string hostName = 1; + private java.lang.Object hostName_ = ""; + /** + * optional string hostName = 1; + */ + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string hostName = 1; + */ + public java.lang.String getHostName() { + java.lang.Object ref = hostName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + hostName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } 
+ } + /** + * optional string hostName = 1; + */ + public Builder setHostName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + /** + * optional string hostName = 1; + */ + public Builder clearHostName() { + bitField0_ = (bitField0_ & ~0x00000001); + hostName_ = getDefaultInstance().getHostName(); + onChanged(); + return this; + } + /** + * optional string hostName = 1; + */ + public Builder setHostNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:AddRegionServerToExclusionListRequest) + } + + static { + defaultInstance = new AddRegionServerToExclusionListRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddRegionServerToExclusionListRequest) + } + + public interface AddRegionServerToExclusionListResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code AddRegionServerToExclusionListResponse} + */ + public static final class AddRegionServerToExclusionListResponse extends + com.google.protobuf.GeneratedMessage + implements AddRegionServerToExclusionListResponseOrBuilder { + // Use AddRegionServerToExclusionListResponse.newBuilder() to construct. 
+ private AddRegionServerToExclusionListResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AddRegionServerToExclusionListResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AddRegionServerToExclusionListResponse defaultInstance; + public static AddRegionServerToExclusionListResponse getDefaultInstance() { + return defaultInstance; + } + + public AddRegionServerToExclusionListResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AddRegionServerToExclusionListResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListResponse_descriptor; + 
} + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AddRegionServerToExclusionListResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddRegionServerToExclusionListResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) 
{ + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder 
toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code AddRegionServerToExclusionListResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToExclusionListResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:AddRegionServerToExclusionListResponse) + } + + static { + defaultInstance = new AddRegionServerToExclusionListResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddRegionServerToExclusionListResponse) + } + + public interface AddRegionServerToInclusionListRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string hostName = 1; + /** + * optional string hostName = 1; + */ + boolean hasHostName(); + /** + * optional string hostName = 1; + */ + java.lang.String getHostName(); + /** + * optional string hostName = 1; + */ + com.google.protobuf.ByteString + getHostNameBytes(); + } + /** + * Protobuf type {@code AddRegionServerToInclusionListRequest} + */ + public static final class AddRegionServerToInclusionListRequest extends + com.google.protobuf.GeneratedMessage + implements AddRegionServerToInclusionListRequestOrBuilder { + // Use AddRegionServerToInclusionListRequest.newBuilder() to construct. 
+ private AddRegionServerToInclusionListRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AddRegionServerToInclusionListRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AddRegionServerToInclusionListRequest defaultInstance; + public static AddRegionServerToInclusionListRequest getDefaultInstance() { + return defaultInstance; + } + + public AddRegionServerToInclusionListRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AddRegionServerToInclusionListRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + hostName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AddRegionServerToInclusionListRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddRegionServerToInclusionListRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional string hostName = 1; + public static final int HOSTNAME_FIELD_NUMBER = 1; + private java.lang.Object hostName_; + /** + * optional string hostName = 1; + */ + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string hostName = 1; + */ + public java.lang.String getHostName() { + java.lang.Object ref = hostName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostName_ = s; + } + return s; + } + } + /** + * optional string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof 
java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + hostName_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getHostNameBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getHostNameBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest) obj; + + boolean result = true; + result = result && (hasHostName() == other.hasHostName()); + if (hasHostName()) { + result = result && 
getHostName() + .equals(other.getHostName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHostName()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code 
AddRegionServerToInclusionListRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + hostName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListRequest_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.hostName_ = hostName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.getDefaultInstance()) return this; + if (other.hasHostName()) { + bitField0_ |= 0x00000001; + hostName_ = other.hostName_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return 
this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional string hostName = 1; + private java.lang.Object hostName_ = ""; + /** + * optional string hostName = 1; + */ + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string hostName = 1; + */ + public java.lang.String getHostName() { + java.lang.Object ref = hostName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + hostName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string hostName = 1; + */ + public com.google.protobuf.ByteString + getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string hostName = 1; + */ + public Builder setHostName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + /** + * optional string hostName = 1; + */ 
+ public Builder clearHostName() { + bitField0_ = (bitField0_ & ~0x00000001); + hostName_ = getDefaultInstance().getHostName(); + onChanged(); + return this; + } + /** + * optional string hostName = 1; + */ + public Builder setHostNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:AddRegionServerToInclusionListRequest) + } + + static { + defaultInstance = new AddRegionServerToInclusionListRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddRegionServerToInclusionListRequest) + } + + public interface AddRegionServerToInclusionListResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code AddRegionServerToInclusionListResponse} + */ + public static final class AddRegionServerToInclusionListResponse extends + com.google.protobuf.GeneratedMessage + implements AddRegionServerToInclusionListResponseOrBuilder { + // Use AddRegionServerToInclusionListResponse.newBuilder() to construct. 
+ private AddRegionServerToInclusionListResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private AddRegionServerToInclusionListResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final AddRegionServerToInclusionListResponse defaultInstance; + public static AddRegionServerToInclusionListResponse getDefaultInstance() { + return defaultInstance; + } + + public AddRegionServerToInclusionListResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private AddRegionServerToInclusionListResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListResponse_descriptor; + 
} + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public AddRegionServerToInclusionListResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AddRegionServerToInclusionListResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) 
{ + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder 
toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code AddRegionServerToInclusionListResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddRegionServerToInclusionListResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:AddRegionServerToInclusionListResponse) + } + + static { + defaultInstance = new AddRegionServerToInclusionListResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddRegionServerToInclusionListResponse) + } + /** * Protobuf service {@code MasterService} */ @@ -41176,6 +44610,38 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback done); + /** + * rpc GetExcludedRegionsServers(.GetExcludedRegionsServersRequest) returns (.GetExcludedRegionsServersResponse); + */ + public abstract void getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc GetIncludedRegionServers(.GetIncludedRegionServersRequest) returns (.GetIncludedRegionServersResponse); + */ + public abstract void getIncludedRegionServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc AddRegionServerToExclusionList(.AddRegionServerToExclusionListRequest) returns 
(.AddRegionServerToExclusionListResponse); + */ + public abstract void addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc AddRegionServerToInclusionList(.AddRegionServerToInclusionListRequest) returns (.AddRegionServerToInclusionListResponse); + */ + public abstract void addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request, + com.google.protobuf.RpcCallback done); + } public static com.google.protobuf.Service newReflectiveService( @@ -41525,6 +44991,38 @@ public final class MasterProtos { impl.listTableNamesByNamespace(controller, request, done); } + @java.lang.Override + public void getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request, + com.google.protobuf.RpcCallback done) { + impl.getExcludedRegionsServers(controller, request, done); + } + + @java.lang.Override + public void getIncludedRegionServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request, + com.google.protobuf.RpcCallback done) { + impl.getIncludedRegionServers(controller, request, done); + } + + @java.lang.Override + public void addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request, + com.google.protobuf.RpcCallback done) { + impl.addRegionServerToExclusionList(controller, request, done); + } + + @java.lang.Override + public void addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request, + com.google.protobuf.RpcCallback done) { + impl.addRegionServerToInclusionList(controller, request, done); + } + }; } @@ -41633,6 +45131,14 @@ public final class MasterProtos { return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request); case 42: return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request); + case 43: + return impl.getExcludedRegionsServers(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest)request); + case 44: + return impl.getIncludedRegionServers(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest)request); + case 45: + return impl.addRegionServerToExclusionList(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest)request); + case 46: + return impl.addRegionServerToInclusionList(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -41733,6 +45239,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); + case 43: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.getDefaultInstance(); + case 44: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.getDefaultInstance(); + case 45: + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.getDefaultInstance(); + case 46: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -41833,6 +45347,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); + case 43: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance(); + case 44: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance(); + case 45: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance(); + case 46: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -42383,6 +45905,38 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback done); + /** + * rpc GetExcludedRegionsServers(.GetExcludedRegionsServersRequest) returns (.GetExcludedRegionsServersResponse); + */ + public abstract void getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc GetIncludedRegionServers(.GetIncludedRegionServersRequest) returns (.GetIncludedRegionServersResponse); + */ + public abstract void getIncludedRegionServers( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc AddRegionServerToExclusionList(.AddRegionServerToExclusionListRequest) returns (.AddRegionServerToExclusionListResponse); + */ + public abstract void addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc AddRegionServerToInclusionList(.AddRegionServerToInclusionListRequest) returns (.AddRegionServerToInclusionListResponse); + */ + public abstract void addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request, + com.google.protobuf.RpcCallback done); + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -42620,6 +46174,26 @@ public final class MasterProtos { com.google.protobuf.RpcUtil.specializeCallback( done)); return; + case 43: + this.getExcludedRegionsServers(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 44: + this.getIncludedRegionServers(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 45: + this.addRegionServerToExclusionList(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 46: + this.addRegionServerToInclusionList(controller, 
(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; default: throw new java.lang.AssertionError("Can't get here."); } @@ -42720,6 +46294,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance(); + case 43: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest.getDefaultInstance(); + case 44: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest.getDefaultInstance(); + case 45: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest.getDefaultInstance(); + case 46: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -42820,6 +46402,14 @@ public final class MasterProtos { return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(); case 42: return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(); + case 43: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance(); + case 44: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance(); + case 45: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance(); + case 46: + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance(); 
default: throw new java.lang.AssertionError("Can't get here."); } @@ -43485,6 +47075,66 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance())); } + + public void getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(43), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance())); + } + + public void getIncludedRegionServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(44), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance())); + } + + public void addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request, + com.google.protobuf.RpcCallback done) { + 
channel.callMethod( + getDescriptor().getMethods().get(45), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance())); + } + + public void addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(46), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance())); + } } public static BlockingInterface newBlockingStub( @@ -43707,6 +47357,26 @@ public final class MasterProtos { com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request) throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse getIncludedRegionServers( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request) + throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { @@ -44231,6 +47901,54 @@ public final class MasterProtos { org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()); } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse getExcludedRegionsServers( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(43), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse getIncludedRegionServers( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(44), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse addRegionServerToExclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(45), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse addRegionServerToInclusionList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(46), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse.getDefaultInstance()); + } + } // @@protoc_insertion_point(class_scope:MasterService) @@ -44646,6 +48364,46 @@ public final class MasterProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_IsProcedureDoneResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetExcludedRegionsServersRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetExcludedRegionsServersRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetExcludedRegionsServersResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetExcludedRegionsServersResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetIncludedRegionServersRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetIncludedRegionServersRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetIncludedRegionServersResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetIncludedRegionServersResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddRegionServerToExclusionListRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddRegionServerToExclusionListRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddRegionServerToExclusionListResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddRegionServerToExclusionListResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddRegionServerToInclusionListRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddRegionServerToInclusionListRequest_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_AddRegionServerToInclusionListResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddRegionServerToInclusionListResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -44763,86 +48521,106 @@ public final class MasterProtos { "IsProcedureDoneRequest\022(\n\tprocedure\030\001 \001(" + "\0132\025.ProcedureDescription\"W\n\027IsProcedureD" + "oneResponse\022\023\n\004done\030\001 \001(\010:\005false\022\'\n\010snap" + - "shot\030\002 \001(\0132\025.ProcedureDescription2\365\027\n\rMa" + - "sterService\022S\n\024GetSchemaAlterStatus\022\034.Ge", - "tSchemaAlterStatusRequest\032\035.GetSchemaAlt" + - "erStatusResponse\022P\n\023GetTableDescriptors\022" + - "\033.GetTableDescriptorsRequest\032\034.GetTableD" + - "escriptorsResponse\022>\n\rGetTableNames\022\025.Ge" + - "tTableNamesRequest\032\026.GetTableNamesRespon" + - "se\022G\n\020GetClusterStatus\022\030.GetClusterStatu" + - "sRequest\032\031.GetClusterStatusResponse\022D\n\017I" + - "sMasterRunning\022\027.IsMasterRunningRequest\032" + - "\030.IsMasterRunningResponse\0222\n\tAddColumn\022\021" + - ".AddColumnRequest\032\022.AddColumnResponse\022;\n", - "\014DeleteColumn\022\024.DeleteColumnRequest\032\025.De" + - "leteColumnResponse\022;\n\014ModifyColumn\022\024.Mod" + - "ifyColumnRequest\032\025.ModifyColumnResponse\022" + - "5\n\nMoveRegion\022\022.MoveRegionRequest\032\023.Move" + - "RegionResponse\022Y\n\026DispatchMergingRegions" + - "\022\036.DispatchMergingRegionsRequest\032\037.Dispa" + - "tchMergingRegionsResponse\022;\n\014AssignRegio" + - "n\022\024.AssignRegionRequest\032\025.AssignRegionRe" + - "sponse\022A\n\016UnassignRegion\022\026.UnassignRegio" + - "nRequest\032\027.UnassignRegionResponse\022>\n\rOff", - "lineRegion\022\025.OfflineRegionRequest\032\026.Offl" + - "ineRegionResponse\0228\n\013DeleteTable\022\023.Delet" + - 
"eTableRequest\032\024.DeleteTableResponse\022>\n\rt" + - "runcateTable\022\025.TruncateTableRequest\032\026.Tr" + - "uncateTableResponse\0228\n\013EnableTable\022\023.Ena" + - "bleTableRequest\032\024.EnableTableResponse\022;\n" + - "\014DisableTable\022\024.DisableTableRequest\032\025.Di" + - "sableTableResponse\0228\n\013ModifyTable\022\023.Modi" + - "fyTableRequest\032\024.ModifyTableResponse\0228\n\013" + - "CreateTable\022\023.CreateTableRequest\032\024.Creat", - "eTableResponse\022/\n\010Shutdown\022\020.ShutdownReq" + - "uest\032\021.ShutdownResponse\0225\n\nStopMaster\022\022." + - "StopMasterRequest\032\023.StopMasterResponse\022," + - "\n\007Balance\022\017.BalanceRequest\032\020.BalanceResp" + - "onse\022M\n\022SetBalancerRunning\022\032.SetBalancer" + - "RunningRequest\032\033.SetBalancerRunningRespo" + - "nse\022A\n\016RunCatalogScan\022\026.RunCatalogScanRe" + - "quest\032\027.RunCatalogScanResponse\022S\n\024Enable" + - "CatalogJanitor\022\034.EnableCatalogJanitorReq" + - "uest\032\035.EnableCatalogJanitorResponse\022\\\n\027I", - "sCatalogJanitorEnabled\022\037.IsCatalogJanito" + - "rEnabledRequest\032 .IsCatalogJanitorEnable" + - "dResponse\022L\n\021ExecMasterService\022\032.Coproce" + - "ssorServiceRequest\032\033.CoprocessorServiceR" + - "esponse\022/\n\010Snapshot\022\020.SnapshotRequest\032\021." 
+ - "SnapshotResponse\022V\n\025GetCompletedSnapshot" + - "s\022\035.GetCompletedSnapshotsRequest\032\036.GetCo" + - "mpletedSnapshotsResponse\022A\n\016DeleteSnapsh" + - "ot\022\026.DeleteSnapshotRequest\032\027.DeleteSnaps" + - "hotResponse\022A\n\016IsSnapshotDone\022\026.IsSnapsh", - "otDoneRequest\032\027.IsSnapshotDoneResponse\022D" + - "\n\017RestoreSnapshot\022\027.RestoreSnapshotReque" + - "st\032\030.RestoreSnapshotResponse\022V\n\025IsRestor" + - "eSnapshotDone\022\035.IsRestoreSnapshotDoneReq" + - "uest\032\036.IsRestoreSnapshotDoneResponse\022>\n\r" + - "ExecProcedure\022\025.ExecProcedureRequest\032\026.E" + - "xecProcedureResponse\022E\n\024ExecProcedureWit" + - "hRet\022\025.ExecProcedureRequest\032\026.ExecProced" + - "ureResponse\022D\n\017IsProcedureDone\022\027.IsProce" + - "dureDoneRequest\032\030.IsProcedureDoneRespons", - "e\022D\n\017ModifyNamespace\022\027.ModifyNamespaceRe" + - "quest\032\030.ModifyNamespaceResponse\022D\n\017Creat" + - "eNamespace\022\027.CreateNamespaceRequest\032\030.Cr" + - "eateNamespaceResponse\022D\n\017DeleteNamespace" + - "\022\027.DeleteNamespaceRequest\032\030.DeleteNamesp" + - "aceResponse\022Y\n\026GetNamespaceDescriptor\022\036." 
+ - "GetNamespaceDescriptorRequest\032\037.GetNames" + - "paceDescriptorResponse\022_\n\030ListNamespaceD" + - "escriptors\022 .ListNamespaceDescriptorsReq" + - "uest\032!.ListNamespaceDescriptorsResponse\022", - "t\n\037ListTableDescriptorsByNamespace\022\'.Lis" + - "tTableDescriptorsByNamespaceRequest\032(.Li" + - "stTableDescriptorsByNamespaceResponse\022b\n" + - "\031ListTableNamesByNamespace\022!.ListTableNa" + - "mesByNamespaceRequest\032\".ListTableNamesBy" + - "NamespaceResponseBB\n*org.apache.hadoop.h" + - "base.protobuf.generatedB\014MasterProtosH\001\210" + - "\001\001\240\001\001" + "shot\030\002 \001(\0132\025.ProcedureDescription\"\"\n Get" + + "ExcludedRegionsServersRequest\"<\n!GetExcl", + "udedRegionsServersResponse\022\027\n\017excludedSe" + + "rvers\030\001 \003(\t\"!\n\037GetIncludedRegionServersR" + + "equest\";\n GetIncludedRegionServersRespon" + + "se\022\027\n\017includedServers\030\001 \003(\t\"9\n%AddRegion" + + "ServerToExclusionListRequest\022\020\n\010hostName" + + "\030\001 \001(\t\"(\n&AddRegionServerToExclusionList" + + "Response\"9\n%AddRegionServerToInclusionLi" + + "stRequest\022\020\n\010hostName\030\001 \001(\t\"(\n&AddRegion" + + "ServerToInclusionListResponse2\240\033\n\rMaster" + + "Service\022S\n\024GetSchemaAlterStatus\022\034.GetSch", + "emaAlterStatusRequest\032\035.GetSchemaAlterSt" + + "atusResponse\022P\n\023GetTableDescriptors\022\033.Ge" + + "tTableDescriptorsRequest\032\034.GetTableDescr" + + "iptorsResponse\022>\n\rGetTableNames\022\025.GetTab" + + "leNamesRequest\032\026.GetTableNamesResponse\022G" + + "\n\020GetClusterStatus\022\030.GetClusterStatusReq" + + "uest\032\031.GetClusterStatusResponse\022D\n\017IsMas" + + "terRunning\022\027.IsMasterRunningRequest\032\030.Is" + + "MasterRunningResponse\0222\n\tAddColumn\022\021.Add" + + "ColumnRequest\032\022.AddColumnResponse\022;\n\014Del", + "eteColumn\022\024.DeleteColumnRequest\032\025.Delete" + + "ColumnResponse\022;\n\014ModifyColumn\022\024.ModifyC" + + 
"olumnRequest\032\025.ModifyColumnResponse\0225\n\nM" + + "oveRegion\022\022.MoveRegionRequest\032\023.MoveRegi" + + "onResponse\022Y\n\026DispatchMergingRegions\022\036.D" + + "ispatchMergingRegionsRequest\032\037.DispatchM" + + "ergingRegionsResponse\022;\n\014AssignRegion\022\024." + + "AssignRegionRequest\032\025.AssignRegionRespon" + + "se\022A\n\016UnassignRegion\022\026.UnassignRegionReq" + + "uest\032\027.UnassignRegionResponse\022>\n\rOffline", + "Region\022\025.OfflineRegionRequest\032\026.OfflineR" + + "egionResponse\0228\n\013DeleteTable\022\023.DeleteTab" + + "leRequest\032\024.DeleteTableResponse\022>\n\rtrunc" + + "ateTable\022\025.TruncateTableRequest\032\026.Trunca" + + "teTableResponse\0228\n\013EnableTable\022\023.EnableT" + + "ableRequest\032\024.EnableTableResponse\022;\n\014Dis" + + "ableTable\022\024.DisableTableRequest\032\025.Disabl" + + "eTableResponse\0228\n\013ModifyTable\022\023.ModifyTa" + + "bleRequest\032\024.ModifyTableResponse\0228\n\013Crea" + + "teTable\022\023.CreateTableRequest\032\024.CreateTab", + "leResponse\022/\n\010Shutdown\022\020.ShutdownRequest" + + "\032\021.ShutdownResponse\0225\n\nStopMaster\022\022.Stop" + + "MasterRequest\032\023.StopMasterResponse\022,\n\007Ba" + + "lance\022\017.BalanceRequest\032\020.BalanceResponse" + + "\022M\n\022SetBalancerRunning\022\032.SetBalancerRunn" + + "ingRequest\032\033.SetBalancerRunningResponse\022" + + "A\n\016RunCatalogScan\022\026.RunCatalogScanReques" + + "t\032\027.RunCatalogScanResponse\022S\n\024EnableCata" + + "logJanitor\022\034.EnableCatalogJanitorRequest" + + "\032\035.EnableCatalogJanitorResponse\022\\\n\027IsCat", + "alogJanitorEnabled\022\037.IsCatalogJanitorEna" + + "bledRequest\032 .IsCatalogJanitorEnabledRes" + + "ponse\022L\n\021ExecMasterService\022\032.Coprocessor" + + "ServiceRequest\032\033.CoprocessorServiceRespo" + + "nse\022/\n\010Snapshot\022\020.SnapshotRequest\032\021.Snap" + + "shotResponse\022V\n\025GetCompletedSnapshots\022\035." 
+ + "GetCompletedSnapshotsRequest\032\036.GetComple" + + "tedSnapshotsResponse\022A\n\016DeleteSnapshot\022\026" + + ".DeleteSnapshotRequest\032\027.DeleteSnapshotR" + + "esponse\022A\n\016IsSnapshotDone\022\026.IsSnapshotDo", + "neRequest\032\027.IsSnapshotDoneResponse\022D\n\017Re" + + "storeSnapshot\022\027.RestoreSnapshotRequest\032\030" + + ".RestoreSnapshotResponse\022V\n\025IsRestoreSna" + + "pshotDone\022\035.IsRestoreSnapshotDoneRequest" + + "\032\036.IsRestoreSnapshotDoneResponse\022>\n\rExec" + + "Procedure\022\025.ExecProcedureRequest\032\026.ExecP" + + "rocedureResponse\022E\n\024ExecProcedureWithRet" + + "\022\025.ExecProcedureRequest\032\026.ExecProcedureR" + + "esponse\022D\n\017IsProcedureDone\022\027.IsProcedure" + + "DoneRequest\032\030.IsProcedureDoneResponse\022D\n", + "\017ModifyNamespace\022\027.ModifyNamespaceReques" + + "t\032\030.ModifyNamespaceResponse\022D\n\017CreateNam" + + "espace\022\027.CreateNamespaceRequest\032\030.Create" + + "NamespaceResponse\022D\n\017DeleteNamespace\022\027.D" + + "eleteNamespaceRequest\032\030.DeleteNamespaceR" + + "esponse\022Y\n\026GetNamespaceDescriptor\022\036.GetN" + + "amespaceDescriptorRequest\032\037.GetNamespace" + + "DescriptorResponse\022_\n\030ListNamespaceDescr" + + "iptors\022 .ListNamespaceDescriptorsRequest" + + "\032!.ListNamespaceDescriptorsResponse\022t\n\037L", + "istTableDescriptorsByNamespace\022\'.ListTab" + + "leDescriptorsByNamespaceRequest\032(.ListTa" + + "bleDescriptorsByNamespaceResponse\022b\n\031Lis" + + "tTableNamesByNamespace\022!.ListTableNamesB" + + "yNamespaceRequest\032\".ListTableNamesByName" + + "spaceResponse\022b\n\031GetExcludedRegionsServe" + + "rs\022!.GetExcludedRegionsServersRequest\032\"." 
+ + "GetExcludedRegionsServersResponse\022_\n\030Get" + + "IncludedRegionServers\022 .GetIncludedRegio" + + "nServersRequest\032!.GetIncludedRegionServe", + "rsResponse\022q\n\036AddRegionServerToExclusion" + + "List\022&.AddRegionServerToExclusionListReq" + + "uest\032\'.AddRegionServerToExclusionListRes" + + "ponse\022q\n\036AddRegionServerToInclusionList\022" + + "&.AddRegionServerToInclusionListRequest\032" + + "\'.AddRegionServerToInclusionListResponse" + + "BB\n*org.apache.hadoop.hbase.protobuf.gen" + + "eratedB\014MasterProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -45341,6 +49119,54 @@ public final class MasterProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsProcedureDoneResponse_descriptor, new java.lang.String[] { "Done", "Snapshot", }); + internal_static_GetExcludedRegionsServersRequest_descriptor = + getDescriptor().getMessageTypes().get(82); + internal_static_GetExcludedRegionsServersRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetExcludedRegionsServersRequest_descriptor, + new java.lang.String[] { }); + internal_static_GetExcludedRegionsServersResponse_descriptor = + getDescriptor().getMessageTypes().get(83); + internal_static_GetExcludedRegionsServersResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetExcludedRegionsServersResponse_descriptor, + new java.lang.String[] { "ExcludedServers", }); + internal_static_GetIncludedRegionServersRequest_descriptor = + getDescriptor().getMessageTypes().get(84); + internal_static_GetIncludedRegionServersRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetIncludedRegionServersRequest_descriptor, + new java.lang.String[] { }); + 
internal_static_GetIncludedRegionServersResponse_descriptor = + getDescriptor().getMessageTypes().get(85); + internal_static_GetIncludedRegionServersResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetIncludedRegionServersResponse_descriptor, + new java.lang.String[] { "IncludedServers", }); + internal_static_AddRegionServerToExclusionListRequest_descriptor = + getDescriptor().getMessageTypes().get(86); + internal_static_AddRegionServerToExclusionListRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddRegionServerToExclusionListRequest_descriptor, + new java.lang.String[] { "HostName", }); + internal_static_AddRegionServerToExclusionListResponse_descriptor = + getDescriptor().getMessageTypes().get(87); + internal_static_AddRegionServerToExclusionListResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddRegionServerToExclusionListResponse_descriptor, + new java.lang.String[] { }); + internal_static_AddRegionServerToInclusionListRequest_descriptor = + getDescriptor().getMessageTypes().get(88); + internal_static_AddRegionServerToInclusionListRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddRegionServerToInclusionListRequest_descriptor, + new java.lang.String[] { "HostName", }); + internal_static_AddRegionServerToInclusionListResponse_descriptor = + getDescriptor().getMessageTypes().get(89); + internal_static_AddRegionServerToInclusionListResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddRegionServerToInclusionListResponse_descriptor, + new java.lang.String[] { }); return null; } }; diff --git hbase-protocol/src/main/protobuf/Master.proto hbase-protocol/src/main/protobuf/Master.proto index 94ea860..08ceebb 100644 --- hbase-protocol/src/main/protobuf/Master.proto 
+++ hbase-protocol/src/main/protobuf/Master.proto @@ -358,6 +358,34 @@ message IsProcedureDoneResponse { optional ProcedureDescription snapshot = 2; } +message GetExcludedRegionsServersRequest { +} + +message GetExcludedRegionsServersResponse { + repeated string excludedServers = 1; +} + +message GetIncludedRegionServersRequest { +} + +message GetIncludedRegionServersResponse { + repeated string includedServers = 1; +} + +message AddRegionServerToExclusionListRequest { + required string hostName = 1; +} + +message AddRegionServerToExclusionListResponse { +} + +message AddRegionServerToInclusionListRequest { + required string hostName = 1; +} + +message AddRegionServerToInclusionListResponse { +} + service MasterService { /** Used by the client to get the number of regions that have received the updated schema */ rpc GetSchemaAlterStatus(GetSchemaAlterStatusRequest) @@ -565,4 +593,16 @@ service MasterService { /** returns a list of tables for a given namespace*/ rpc ListTableNamesByNamespace(ListTableNamesByNamespaceRequest) returns(ListTableNamesByNamespaceResponse); + + rpc GetExcludedRegionsServers(GetExcludedRegionsServersRequest) + returns(GetExcludedRegionsServersResponse); + + rpc GetIncludedRegionServers(GetIncludedRegionServersRequest) + returns(GetIncludedRegionServersResponse); + + rpc AddRegionServerToExclusionList(AddRegionServerToExclusionListRequest) + returns(AddRegionServerToExclusionListResponse); + + rpc AddRegionServerToInclusionList(AddRegionServerToInclusionListRequest) + returns(AddRegionServerToInclusionListResponse); } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/AllowedRegionServersAccessor.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/AllowedRegionServersAccessor.java new file mode 100644 index 0000000..d197db9 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/AllowedRegionServersAccessor.java @@ -0,0 +1,177 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) 
under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MetaTableAccessor; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; +import org.apache.hadoop.hbase.io.compress.Compression; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.util.Bytes; + +/** + * A class that provides access to internal meta table {@link #ALLOWED_RS_TABLE_STR} in the 
hbase + * reserved namespace. The table contains the data allowed/denied regions servers. + */ +public class AllowedRegionServersAccessor { + + public static final String ALLOWED_RS_TABLE_STR = "allowedRS"; + public static final byte[] ALLOWED_RS_TABLE_NAME = ALLOWED_RS_TABLE_STR.getBytes(); + public static final TableName ALLOWED_RS_TABLE = TableName.valueOf( + NamespaceDescriptor.SYSTEM_NAMESPACE_NAME, ALLOWED_RS_TABLE_NAME); + + private static final byte[] ALLOWED_RS_COL_FAMILY = "info".getBytes(); + private static final byte[] ALLOWED_RS_COLUMN_NAME = "type".getBytes(); + + public static final HTableDescriptor ALLOWED_RS_TABLE_DESC = new HTableDescriptor( + ALLOWED_RS_TABLE); + + static { + ALLOWED_RS_TABLE_DESC.addFamily(new HColumnDescriptor(ALLOWED_RS_COL_FAMILY, + 10, // Ten is arbitrary number. Keep versions to help debugging. + Compression.Algorithm.NONE.getName(), true, true, 8 * 1024, HConstants.FOREVER, + BloomType.NONE.toString(), HConstants.REPLICATION_SCOPE_LOCAL). + // Set cache data blocks in L1 if more than one cache tier deployed; e.g. this will + // be the case if we are using CombinedBlockCache (Bucket Cache). 
+ setCacheDataInL1(true)); + } + + public static enum HostColumnType { + INCLUDED, EXCLUDED; + + private static final byte[] ALLOWED_RS_COL_VALUE = "a".getBytes(); + private static final byte[] DENIED_RS_COL_VALUE = "d".getBytes(); + + public static byte[] getColumnValue(HostColumnType type) { + switch (type) { + case INCLUDED: + return ALLOWED_RS_COL_VALUE; + case EXCLUDED: + return DENIED_RS_COL_VALUE; + default: + throw new IllegalArgumentException(); + } + } + + public static HostColumnType getType(byte[] value) { + if (Arrays.equals(ALLOWED_RS_COL_VALUE, value)) return INCLUDED; + else if (Arrays.equals(DENIED_RS_COL_VALUE, value)) return EXCLUDED; + throw new IllegalArgumentException(); + } + } + + @SuppressWarnings("deprecation") + private static HTableInterface getAllowedRegionsTable(HConnection connection) throws IOException { + return new HTable(ALLOWED_RS_TABLE, connection); + } + + public static void init(MasterServices master) throws IOException { + if (!MetaTableAccessor.tableExists(master.getShortCircuitConnection(), + AllowedRegionServersAccessor.ALLOWED_RS_TABLE)) master.createTable(ALLOWED_RS_TABLE_DESC, + null); + } + + public static Map> getHosts(HConnection connection) + throws IOException { + Map> resultMap = + new HashMap>(); + for (HostColumnType value : HostColumnType.values()) + resultMap.put(value, new ArrayList()); + + Scan scan = new Scan(); + HTableInterface allowedRSTableObj = getAllowedRegionsTable(connection); + Iterator resultItr = allowedRSTableObj.getScanner(scan).iterator(); + while (resultItr.hasNext()) { + Result result = resultItr.next(); + Cell cell = result.getColumnLatestCell(ALLOWED_RS_COL_FAMILY, ALLOWED_RS_COLUMN_NAME); + if (cell != null) { + resultMap.get(HostColumnType.getType(cell.getValueArray())).add( + Bytes.toString(cell.getRowArray())); + } + } + return resultMap; + } + + private static List getHosts(HConnection connection, byte[] columnValue) + throws IOException { + List hosts = new ArrayList(); + Scan scan = 
new Scan(); + HTableInterface allowedRSTableObj = getAllowedRegionsTable(connection); + if (columnValue != null) { + scan.setFilter(new SingleColumnValueFilter(ALLOWED_RS_COL_FAMILY, ALLOWED_RS_COLUMN_NAME, + CompareOp.EQUAL, columnValue)); + } + Iterator resultItr = allowedRSTableObj.getScanner(scan).iterator(); + while (resultItr.hasNext()) { + Result result = resultItr.next(); + hosts.add(Bytes.toString(result.getRow())); + } + return hosts; + } + + public static List getIncludedHosts(HConnection connection) throws IOException { + return getHosts(connection, HostColumnType.ALLOWED_RS_COL_VALUE); + } + + public static List getExcludedHosts(HConnection connection) throws IOException { + return getHosts(connection, HostColumnType.DENIED_RS_COL_VALUE); + } + + private static void addHostToTable(HConnection connection, String hostName, boolean allowed) + throws IOException { + HTableInterface allowedRSTableObj = getAllowedRegionsTable(connection); + Put put = new Put(hostName.getBytes()); + if (allowed) put.add(ALLOWED_RS_COL_FAMILY, ALLOWED_RS_COLUMN_NAME, + HostColumnType.ALLOWED_RS_COL_VALUE); + else put.add(ALLOWED_RS_COL_FAMILY, ALLOWED_RS_COLUMN_NAME, HostColumnType.DENIED_RS_COL_VALUE); + allowedRSTableObj.put(put); + allowedRSTableObj.close(); + } + + public static void addHostToInclusionList(HConnection connection, String hostName) + throws IOException { + addHostToTable(connection, hostName, true); + } + + public static void addHostToExclusionList(HConnection connection, String hostName) + throws IOException { + addHostToTable(connection, hostName, false); + } + +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 0c87e63..4c33250 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -46,6 +46,7 @@ import org.apache.hadoop.conf.Configuration; 
import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.CoordinatedStateException; +import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -53,6 +54,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.PleaseHoldException; @@ -64,12 +66,10 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.UnknownRegionException; -import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.client.MetaScanner; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.executor.ExecutorType; @@ -100,8 +100,8 @@ import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost; import org.apache.hadoop.hbase.procedure.flush.MasterFlushTableProcedureManager; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo; -import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode; import 
org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; +import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.regionserver.RegionSplitPolicy; @@ -598,10 +598,12 @@ public class HMaster extends HRegionServer implements MasterServices, Server { LOG.error("Coprocessor preMasterInitialization() hook failed", e); } } + status.markComplete("Initialization successful"); LOG.info("Master has completed initialization"); initialized = true; + serverManager.initialize(); // clear the dead servers with same host name and port of online server because we are not // removing dead server with same hostname and port of rs which is trying to check in before // master initialization. See HBASE-5916. @@ -722,7 +724,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { tableNamespaceManager = new TableNamespaceManager(this); tableNamespaceManager.start(); } - + boolean isCatalogJanitorEnabled() { return catalogJanitorChore != null ? 
catalogJanitorChore.getEnabled() : false; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index d6f825b..37f6f5f 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -32,16 +32,17 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.PleaseHoldException; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.UnknownRegionException; -import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.exceptions.MergeRegionException; import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface; +import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.procedure.MasterProcedureManager; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -54,6 +55,10 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Re import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListRequest; +import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToExclusionListResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddRegionServerToInclusionListResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest; @@ -84,6 +89,10 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusR import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetExcludedRegionsServersResponse; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetIncludedRegionServersResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; @@ -1271,4 +1280,60 @@ public class MasterRpcServices extends RSRpcServices throw new ServiceException(ioe); } } + + @Override + public GetExcludedRegionsServersResponse getExcludedRegionsServers(RpcController controller, + GetExcludedRegionsServersRequest request) throws ServiceException { + try { + master.checkServiceStarted(); + List excludedServers = 
master.serverManager.getExcludedRegionServers(); + return GetExcludedRegionsServersResponse.newBuilder().addAllExcludedServers(excludedServers) + .build(); + } catch (ServerNotRunningYetException snrye) { + throw new ServiceException(snrye); + } + } + + @Override + public GetIncludedRegionServersResponse getIncludedRegionServers(RpcController controller, + GetIncludedRegionServersRequest request) throws ServiceException { + try { + master.checkServiceStarted(); + List excludedServers = master.serverManager.getIncludedRegionServers(); + return GetIncludedRegionServersResponse.newBuilder().addAllIncludedServers(excludedServers) + .build(); + } catch (ServerNotRunningYetException snrye) { + throw new ServiceException(snrye); + } + } + + @Override + public AddRegionServerToExclusionListResponse addRegionServerToExclusionList( + RpcController controller, AddRegionServerToExclusionListRequest request) + throws ServiceException { + try { + master.checkServiceStarted(); + master.serverManager.addRegionServerToExclusionList(request.getHostName()); + return AddRegionServerToExclusionListResponse.newBuilder().build(); + } catch (ServerNotRunningYetException snrye) { + throw new ServiceException(snrye); + } catch (IOException e) { + throw new ServiceException(e); + } + } + + @Override + public AddRegionServerToInclusionListResponse addRegionServerToInclusionList( + RpcController controller, AddRegionServerToInclusionListRequest request) + throws ServiceException { + try { + master.checkServiceStarted(); + master.serverManager.addRegionServerToInclusionList(request.getHostName()); + return AddRegionServerToInclusionListResponse.newBuilder().build(); + } catch (ServerNotRunningYetException snrye) { + throw new ServiceException(snrye); + } catch (IOException e) { + throw new ServiceException(e); + } + } } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index 
9390eba..9e0e923 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.ServerExcludedException; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.YouAreDeadException; @@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.RetriesExhaustedException; +import org.apache.hadoop.hbase.master.AllowedRegionServersAccessor.HostColumnType; import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.master.handler.MetaServerShutdownHandler; import org.apache.hadoop.hbase.master.handler.ServerShutdownHandler; @@ -183,6 +185,13 @@ public class ServerManager { /** Listeners that are called on server events. */ private List listeners = new CopyOnWriteArrayList(); + + private final Set includedRegionServers = new HashSet(); + private final Set excludedRegionServers = new HashSet(); + private final Object includeExcludeLock = new Object(); + + private volatile boolean allowedRSAccessorInitialized = false; + private final boolean enableRSAccessor; /** * Constructor. @@ -202,6 +211,7 @@ public class ServerManager { Configuration c = master.getConfiguration(); maxSkew = c.getLong("hbase.master.maxclockskew", 30000); warningSkew = c.getLong("hbase.master.warningclockskew", 10000); + enableRSAccessor = c.getBoolean("hbase.master.enableallowedhostscheck", false); this.connection = connect ? 
HConnectionManager.getConnection(c) : null;
// Put this in constructor so we don't cast it every time
@@ -216,6 +226,90 @@ public class ServerManager {
       balancer = (BaseLoadBalancer) ((HMaster) master).balancer;
     }
   }
+
+  /**
+   * Returns true when the server must be kept out of the cluster: it is on the
+   * exclusion list, or a non-empty inclusion list does not contain it.
+   * Matches on either bare hostname or host:port. Exclusion wins over inclusion.
+   */
+  private boolean isServerExcluded(ServerName sn) {
+    if (!allowedRSAccessorInitialized) {
+      // Accessor never came up; fail open so servers can still join.
+      return false;
+    }
+    synchronized (includeExcludeLock) {
+      // An empty inclusion list means every server is implicitly included.
+      boolean included = includedRegionServers.isEmpty()
+          || includedRegionServers.contains(sn.getHostname())
+          || includedRegionServers.contains(sn.getHostAndPort());
+      boolean excluded = excludedRegionServers.contains(sn.getHostname())
+          || excludedRegionServers.contains(sn.getHostAndPort());
+      if (excluded) {
+        return true;
+      }
+      return !included;
+    }
+  }
+
+  /**
+   * Expires every online server whose hostname or host:port equals hostName.
+   * Collects matches first to avoid mutating the online-server view while iterating.
+   */
+  private void expireExcludedHost(String hostName) {
+    List<ServerName> onlineServersList = getOnlineServersList();
+    List<ServerName> serversToExpire = new ArrayList<ServerName>();
+    for (ServerName sn : onlineServersList) {
+      if (sn.getHostname().equals(hostName) || sn.getHostAndPort().equals(hostName)) {
+        serversToExpire.add(sn);
+      }
+    }
+    for (ServerName sn : serversToExpire) {
+      LOG.info("Expiring server as the host has been added to exclusion list: " + sn);
+      expireServer(sn);
+    }
+  }
+
+  /** @return a snapshot copy of the inclusion list. */
+  public List<String> getIncludedRegionServers() {
+    synchronized (includeExcludeLock) {
+      return new ArrayList<String>(includedRegionServers);
+    }
+  }
+
+  /** @return a snapshot copy of the exclusion list. */
+  public List<String> getExcludedRegionServers() {
+    synchronized (includeExcludeLock) {
+      return new ArrayList<String>(excludedRegionServers);
+    }
+  }
+
+  /**
+   * Persists hostName (hostname or host:port) to the exclusion list and expires
+   * any matching online servers.
+   */
+  public void addRegionServerToExclusionList(String hostName) throws IOException {
+    synchronized (includeExcludeLock) {
+      AllowedRegionServersAccessor.addHostToExclusionList(master.getShortCircuitConnection(),
+        hostName);
+      excludedRegionServers.add(hostName);
+    }
+    // Expire outside the lock: expireServer can be slow and must not hold it.
+    expireExcludedHost(hostName);
+  }
+
+  /** Persists hostName (hostname or host:port) to the inclusion list. */
+  public void addRegionServerToInclusionList(String hostName) throws IOException {
+    synchronized (includeExcludeLock) {
+      AllowedRegionServersAccessor.addHostToInclusionList(master.getShortCircuitConnection(),
+        hostName);
+      includedRegionServers.add(hostName);
+    }
+  }
+
+  /** Reloads both lists from the accessor table, then expires now-excluded hosts. */
+  private void refreshAllowedRegionServers() throws IOException {
+    Map<HostColumnType, List<String>> hostsList =
+        AllowedRegionServersAccessor.getHosts(master.getShortCircuitConnection());
+    synchronized (includeExcludeLock) {
+      includedRegionServers.clear();
+      excludedRegionServers.clear();
+      includedRegionServers.addAll(hostsList.get(HostColumnType.INCLUDED));
+      excludedRegionServers.addAll(hostsList.get(HostColumnType.EXCLUDED));
+    }
+    for (String excludedHost : hostsList.get(HostColumnType.EXCLUDED)) {
+      expireExcludedHost(excludedHost);
+    }
+  }
+
+  void initialize() {
+    if (enableRSAccessor) {
+      try {
+        AllowedRegionServersAccessor.init(services);
+        refreshAllowedRegionServers();
+        allowedRSAccessorInitialized = true;
+      } catch (IOException e) {
+        LOG.warn("Exception occurred while initializing allowed region servers accessor", e);
+      }
+    } else {
+      LOG.info("Inclusion/Exclusion check on region servers is not enabled.");
+    }
+  }

   /**
    * Add the listener to the notification list.
@@ -288,8 +382,8 @@ public class ServerManager {
     }
   }

-  void regionServerReport(ServerName sn,
-    ServerLoad sl) throws YouAreDeadException {
+  void regionServerReport(ServerName sn, ServerLoad sl) throws YouAreDeadException,
+      ServerExcludedException {
     checkIsDead(sn, "REPORT");
     if (null == this.onlineServers.replace(sn, sl)) {
       // Already have this host+port combo and its just different start code?
@@ -314,8 +408,12 @@ public class ServerManager {
    * @param sl the server load on the server
    * @return true if the server is recorded, otherwise, false
    */
-  boolean checkAndRecordNewServer(
-      final ServerName serverName, final ServerLoad sl) {
+  boolean checkAndRecordNewServer(final ServerName serverName, final ServerLoad sl)
+      throws ServerExcludedException {
+
+    // Reject check-in before recording anything for this server.
+    if (isServerExcluded(serverName)) {
+      throw new ServerExcludedException("Server is excluded from joining the cluster.");
+    }
+
     ServerName existingServer = null;
     synchronized (this.onlineServers) {
       existingServer = findServerWithSameHostnamePortWithLock(serverName);
@@ -1021,7 +1119,7 @@ public class ServerManager {
   public boolean isClusterShutdown() {
     return this.clusterShutdown;
   }
-  
+
   /**
    * Stop the ServerManager. Currently closes the connection to the master.
*/ diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 24bb09a..294157e 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HealthCheckChore; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.ServerExcludedException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableDescriptors; @@ -2052,8 +2053,11 @@ public class HRegionServer extends HasThread implements throw ioe; } else if (ioe instanceof ServerNotRunningYetException) { LOG.debug("Master is not running yet"); + } else if (ioe instanceof ServerExcludedException) { + LOG.fatal("Master rejected this node since this node has been put in the excluded list."); + throw ioe; } else { - LOG.warn("error telling master we are up", se); + LOG.warn("error telling master we are up", se); } } return result; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAllowedRegionServers.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAllowedRegionServers.java new file mode 100644 index 0000000..16c1c76 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAllowedRegionServers.java @@ -0,0 +1,65 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.master; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; +import org.junit.Assert; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(MediumTests.class) +public class TestAllowedRegionServers { + + @Test + public void testExcludeHost() throws Exception { + Configuration conf = HBaseConfiguration.create(); + conf.set("hbase.master.enableallowedhostscheck", "true"); + + HBaseTestingUtility obj = new HBaseTestingUtility(conf); + obj.startMiniCluster(1, 1); + + MiniHBaseCluster cluster = obj.getMiniHBaseCluster(); + RegionServerThread regionThread = cluster.startRegionServer(); + ServerName serverName = regionThread.getRegionServer().getServerName(); + + while (true) { + if (cluster.getMaster().initialized == false) Thread.sleep(1000); + break; + } + + Assert.assertTrue(cluster.getMaster().getServerManager().getOnlineServersList() + .contains(serverName)); + cluster.getMaster().getServerManager() + .addRegionServerToExclusionList(serverName.getHostAndPort()); + + for (int i = 0; i < 6; i++) { + 
Thread.sleep(6000); + if (!cluster.getMaster().getServerManager().getOnlineServersList().contains(serverName)) break; + } + + Assert.assertFalse(cluster.getMaster().getServerManager().getOnlineServersList() + .contains(serverName)); + } +}