diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index bbb831d..d5f2a07 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -17,8 +17,17 @@
*/
package org.apache.hadoop.hbase;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ServiceException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -49,17 +58,8 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.protobuf.ServiceException;
/**
* Read/write operations on region and assignment information store in
@@ -878,6 +878,32 @@ public class MetaTableAccessor {
}
/**
+ * A Visitor that skips offline regions and split parents
+ */
+ public static abstract class DefaultVisitorBase implements Visitor {
+
+ public DefaultVisitorBase() {
+ super();
+ }
+
+ public abstract boolean visitInternal(Result rowResult) throws IOException;
+
+ @Override
+ public boolean visit(Result rowResult) throws IOException {
+ HRegionInfo info = getHRegionInfo(rowResult);
+ if (info == null) {
+ return true;
+ }
+
+ //skip over offline and split regions
+ if (!(info.isOffline() || info.isSplit())) {
+ return visitInternal(rowResult);
+ }
+ return true;
+ }
+ }
+
+ /**
* Count regions in hbase:meta for passed table.
* @param c Configuration object
* @param tableName table name to count regions for
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
index e7fbefb..973aa3f 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
import com.google.common.net.InetAddresses;
import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.common.net.HostAndPort;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -87,9 +88,8 @@ public class ServerName implements Comparable, Serializable {
public static final String UNKNOWN_SERVERNAME = "#unknown#";
private final String servername;
- private final String hostnameOnly;
- private final int port;
private final long startcode;
+ private final HostAndPort hostAndPort;
/**
* Cached versioned bytes of this ServerName instance.
@@ -101,10 +101,9 @@ public class ServerName implements Comparable, Serializable {
private ServerName(final String hostname, final int port, final long startcode) {
// Drop the domain is there is one; no need of it in a local cluster. With it, we get long
// unwieldy names.
- this.hostnameOnly = hostname;
- this.port = port;
+ this.hostAndPort = HostAndPort.fromParts(hostname, port);
this.startcode = startcode;
- this.servername = getServerName(this.hostnameOnly, port, startcode);
+ this.servername = getServerName(hostname, port, startcode);
}
/**
@@ -188,7 +187,8 @@ public class ServerName implements Comparable, Serializable {
* in compares, etc.
*/
public String toShortString() {
- return Addressing.createHostAndPortStr(getHostNameMinusDomain(this.hostnameOnly), this.port);
+ return Addressing.createHostAndPortStr(
+ getHostNameMinusDomain(hostAndPort.getHostText()), hostAndPort.getPort());
}
/**
@@ -207,11 +207,11 @@ public class ServerName implements Comparable, Serializable {
}
public String getHostname() {
- return hostnameOnly;
+ return hostAndPort.getHostText();
}
public int getPort() {
- return port;
+ return hostAndPort.getPort();
}
public long getStartcode() {
@@ -255,7 +255,11 @@ public class ServerName implements Comparable, Serializable {
* {@link Addressing#createHostAndPortStr(String, int)}
*/
public String getHostAndPort() {
- return Addressing.createHostAndPortStr(this.hostnameOnly, this.port);
+ return Addressing.createHostAndPortStr(hostAndPort.getHostText(), hostAndPort.getPort());
+ }
+
+ public HostAndPort getHostPort() {
+ return hostAndPort;
}
/**
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdmin.java hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdmin.java
new file mode 100644
index 0000000..36bfa65
--- /dev/null
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdmin.java
@@ -0,0 +1,120 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.group;
+
+import com.google.common.net.HostAndPort;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Connection;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Group user API interface used between client and server.
+ */
+@InterfaceAudience.Private
+public abstract class GroupAdmin implements Closeable {
+
+ /**
+ * Create a new GroupAdmin client
+ * @param conn
+ * @return a new GroupAdmin client
+ * @throws IOException
+ */
+ public static GroupAdmin newClient(Connection conn) throws IOException {
+ return new GroupAdminClient(conn);
+ }
+
+ /**
+ * Gets the group information.
+ *
+ * @param groupName the group name
+ * @return An instance of GroupInfo
+ */
+ public abstract GroupInfo getGroupInfo(String groupName) throws IOException;
+
+ /**
+ * Gets the group info of table.
+ *
+ * @param tableName the table name
+ * @return An instance of GroupInfo.
+ */
+ public abstract GroupInfo getGroupInfoOfTable(TableName tableName) throws IOException;
+
+  /**
+   * Move a set of servers to another group.
+   *
+   *
+   * @param servers set of servers to move
+   * @param targetGroup the target group
+   * @throws java.io.IOException Signals that an I/O exception has occurred.
+   */
+  public abstract void moveServers(Set<HostAndPort> servers, String targetGroup) throws IOException;
+
+ /**
+ * Move tables to a new group.
+ * This will unassign all of a table's region so it can be reassigned to the correct group.
+ * @param tables list of tables to move
+ * @param targetGroup target group
+ * @throws java.io.IOException
+ */
+  public abstract void moveTables(Set<TableName> tables, String targetGroup) throws IOException;
+
+ /**
+ * Add a new group
+ * @param name name of the group
+ * @throws java.io.IOException
+ */
+ public abstract void addGroup(String name) throws IOException;
+
+ /**
+ * Remove a group
+ * @param name name of the group
+ * @throws java.io.IOException
+ */
+ public abstract void removeGroup(String name) throws IOException;
+
+ /**
+ * Balance the regions in a group
+ *
+   * @param name the name of the group to balance
+ * @return
+ * @throws java.io.IOException
+ */
+ public abstract boolean balanceGroup(String name) throws IOException;
+
+ /**
+ * Lists the existing groups.
+ *
+ * @return Collection of GroupInfo.
+ */
+  public abstract List<GroupInfo> listGroups() throws IOException;
+
+ /**
+ * Retrieve the GroupInfo a server is affiliated to
+ * @param hostPort
+ * @return
+ * @throws java.io.IOException
+ */
+ public abstract GroupInfo getGroupOfServer(HostAndPort hostPort) throws IOException;
+}
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdminClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdminClient.java
new file mode 100644
index 0000000..fae3de3
--- /dev/null
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/group/GroupAdminClient.java
@@ -0,0 +1,192 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.group;
+
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
+import com.google.protobuf.ServiceException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos;
+import org.apache.hadoop.hbase.protobuf.generated.GroupProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Client used for managing region server group information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+class GroupAdminClient extends GroupAdmin {
+ private GroupAdminProtos.GroupAdminService.BlockingInterface proxy;
+ private static final Log LOG = LogFactory.getLog(GroupAdminClient.class);
+
+ public GroupAdminClient(Connection conn) throws IOException {
+ proxy = GroupAdminProtos.GroupAdminService.newBlockingStub(
+ conn.getAdmin().coprocessorService());
+ }
+
+ @Override
+ public GroupInfo getGroupInfo(String groupName) throws IOException {
+ try {
+ GroupAdminProtos.GetGroupInfoResponse resp =
+ proxy.getGroupInfo(null,
+ GroupAdminProtos.GetGroupInfoRequest.newBuilder().setGroupName(groupName).build());
+ if(resp.hasGroupInfo()) {
+ return ProtobufUtil.toGroupInfo(resp.getGroupInfo());
+ }
+ return null;
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public GroupInfo getGroupInfoOfTable(TableName tableName) throws IOException {
+ GroupAdminProtos.GetGroupInfoOfTableRequest request =
+ GroupAdminProtos.GetGroupInfoOfTableRequest.newBuilder()
+ .setTableName(ProtobufUtil.toProtoTableName(tableName)).build();
+
+ try {
+ return ProtobufUtil.toGroupInfo(proxy.getGroupInfoOfTable(null, request).getGroupInfo());
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+  public void moveServers(Set<HostAndPort> servers, String targetGroup) throws IOException {
+    Set<HBaseProtos.HostPort> hostPorts = Sets.newHashSet();
+ for(HostAndPort el: servers) {
+ hostPorts.add(HBaseProtos.HostPort.newBuilder()
+ .setHostName(el.getHostText())
+ .setPort(el.getPort())
+ .build());
+ }
+ GroupAdminProtos.MoveServersRequest request =
+ GroupAdminProtos.MoveServersRequest.newBuilder()
+ .setTargetGroup(targetGroup)
+ .addAllServers(hostPorts).build();
+
+ try {
+ proxy.moveServers(null, request);
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+  public void moveTables(Set<TableName> tables, String targetGroup) throws IOException {
+ GroupAdminProtos.MoveTablesRequest.Builder builder =
+ GroupAdminProtos.MoveTablesRequest.newBuilder()
+ .setTargetGroup(targetGroup);
+ for(TableName tableName: tables) {
+ builder.addTableName(ProtobufUtil.toProtoTableName(tableName));
+ }
+ try {
+ proxy.moveTables(null, builder.build());
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public void addGroup(String groupName) throws IOException {
+ GroupAdminProtos.AddGroupRequest request =
+ GroupAdminProtos.AddGroupRequest.newBuilder()
+ .setGroupName(groupName).build();
+ try {
+ proxy.addGroup(null, request);
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public void removeGroup(String name) throws IOException {
+ GroupAdminProtos.RemoveGroupRequest request =
+ GroupAdminProtos.RemoveGroupRequest.newBuilder()
+ .setGroupName(name).build();
+ try {
+ proxy.removeGroup(null, request);
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public boolean balanceGroup(String name) throws IOException {
+ GroupAdminProtos.BalanceGroupRequest request =
+ GroupAdminProtos.BalanceGroupRequest.newBuilder()
+ .setGroupName(name).build();
+
+ try {
+ return proxy.balanceGroup(null, request).getBalanceRan();
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+  public List<GroupInfo> listGroups() throws IOException {
+ try {
+      List<GroupProtos.GroupInfo> resp =
+ proxy.listGroupInfos(null, GroupAdminProtos.ListGroupInfosRequest.newBuilder().build())
+ .getGroupInfoList();
+      List<GroupInfo> result = new ArrayList<GroupInfo>(resp.size());
+ for(GroupProtos.GroupInfo entry: resp) {
+ result.add(ProtobufUtil.toGroupInfo(entry));
+ }
+ return result;
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public GroupInfo getGroupOfServer(HostAndPort hostPort) throws IOException {
+ GroupAdminProtos.GetGroupInfoOfServerRequest request =
+ GroupAdminProtos.GetGroupInfoOfServerRequest.newBuilder()
+ .setServer(HBaseProtos.HostPort.newBuilder()
+ .setHostName(hostPort.getHostText())
+ .setPort(hostPort.getPort())
+ .build())
+ .build();
+ try {
+ return ProtobufUtil.toGroupInfo(
+ proxy.getGroupInfoOfServer(null, request).getGroupInfo());
+ } catch (ServiceException e) {
+ throw ProtobufUtil.getRemoteException(e);
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ }
+}
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 4aff7f3..45e2d58 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -68,9 +68,9 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.group.GroupInfo;
import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
@@ -111,6 +111,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
+import org.apache.hadoop.hbase.protobuf.generated.GroupProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
@@ -121,15 +122,16 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableReques
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos;
+import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
-import org.apache.hadoop.hbase.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.quotas.QuotaScope;
import org.apache.hadoop.hbase.quotas.QuotaType;
@@ -156,6 +158,7 @@ import org.apache.hadoop.security.token.Token;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
+import com.google.common.net.HostAndPort;
import com.google.protobuf.ByteString;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;
@@ -2869,7 +2872,7 @@ public final class ProtobufUtil {
}
return result;
}
-
+
/**
* Convert a protocol buffer TimeUnit to a client TimeUnit
* @param proto
@@ -3085,7 +3088,7 @@ public final class ProtobufUtil {
* @param builder current message builder
* @param in InputStream containing protobuf data
* @param size known size of protobuf data
- * @throws IOException
+ * @throws IOException
*/
public static void mergeFrom(Message.Builder builder, InputStream in, int size)
throws IOException {
@@ -3100,7 +3103,7 @@ public final class ProtobufUtil {
* buffers where the message size is not known
* @param builder current message builder
* @param in InputStream containing protobuf data
- * @throws IOException
+ * @throws IOException
*/
public static void mergeFrom(Message.Builder builder, InputStream in)
throws IOException {
@@ -3114,8 +3117,8 @@ public final class ProtobufUtil {
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers when working with ByteStrings
* @param builder current message builder
- * @param bs ByteString containing the
- * @throws IOException
+ * @param bs ByteString containing the
+ * @throws IOException
*/
public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
final CodedInputStream codedInput = bs.newCodedInput();
@@ -3129,7 +3132,7 @@ public final class ProtobufUtil {
* buffers when working with byte arrays
* @param builder current message builder
* @param b byte array
- * @throws IOException
+ * @throws IOException
*/
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
final CodedInputStream codedInput = CodedInputStream.newInstance(b);
@@ -3190,4 +3193,34 @@ public final class ProtobufUtil {
builder.setSrcChecksum(VersionInfo.getSrcChecksum());
return builder.build();
}
+
+ public static GroupInfo toGroupInfo(GroupProtos.GroupInfo proto) {
+ GroupInfo groupInfo = new GroupInfo(proto.getName());
+ for(HBaseProtos.HostPort el: proto.getServersList()) {
+ groupInfo.addServer(HostAndPort.fromParts(el.getHostName(), el.getPort()));
+ }
+ for(HBaseProtos.TableName pTableName: proto.getTablesList()) {
+ groupInfo.addTable(ProtobufUtil.toTableName(pTableName));
+ }
+ return groupInfo;
+ }
+
+ public static GroupProtos.GroupInfo toProtoGroupInfo(GroupInfo pojo) {
+    List<HBaseProtos.TableName> tables =
+        new ArrayList<HBaseProtos.TableName>(pojo.getTables().size());
+ for(TableName arg: pojo.getTables()) {
+ tables.add(ProtobufUtil.toProtoTableName(arg));
+ }
+    List<HBaseProtos.HostPort> hostports =
+        new ArrayList<HBaseProtos.HostPort>(pojo.getServers().size());
+ for(HostAndPort el: pojo.getServers()) {
+ hostports.add(HBaseProtos.HostPort.newBuilder()
+ .setHostName(el.getHostText())
+ .setPort(el.getPort())
+ .build());
+ }
+ return GroupProtos.GroupInfo.newBuilder().setName(pojo.getName())
+ .addAllServers(hostports)
+ .addAllTables(tables).build();
+ }
}
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/group/GroupInfo.java hbase-common/src/main/java/org/apache/hadoop/hbase/group/GroupInfo.java
new file mode 100644
index 0000000..2f78224
--- /dev/null
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/group/GroupInfo.java
@@ -0,0 +1,177 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.group;
+
+import com.google.common.collect.Sets;
+import com.google.common.net.HostAndPort;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.NavigableSet;
+import java.util.Set;
+
+/**
+ * Stores the group information of region server groups.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class GroupInfo implements Serializable {
+
+ public static final String DEFAULT_GROUP = "default";
+ public static final String NAMESPACEDESC_PROP_GROUP = "hbase.rsgroup.name";
+
+ private String name;
+  private Set<HostAndPort> servers;
+  private NavigableSet<TableName> tables;
+
+ public GroupInfo(String name) {
+    this(name, Sets.<HostAndPort>newHashSet(), Sets.<TableName>newTreeSet());
+ }
+
+  GroupInfo(String name,
+      Set<HostAndPort> servers,
+      NavigableSet<TableName> tables) {
+ this.name = name;
+ this.servers = servers;
+ this.tables = tables;
+ }
+
+ public GroupInfo(GroupInfo src) {
+ name = src.getName();
+ servers = Sets.newHashSet(src.getServers());
+ tables = Sets.newTreeSet(src.getTables());
+ }
+
+ /**
+ * Get group name.
+ *
+ * @return
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Adds the server to the group.
+ *
+ * @param hostPort the server
+ */
+ public void addServer(HostAndPort hostPort){
+ servers.add(hostPort);
+ }
+
+ /**
+ * Adds a group of servers.
+ *
+ * @param hostPort the servers
+ */
+  public void addAllServers(Collection<HostAndPort> hostPort){
+ servers.addAll(hostPort);
+ }
+
+ /**
+ * @param hostPort
+ * @return true, if a server with hostPort is found
+ */
+ public boolean containsServer(HostAndPort hostPort) {
+ return servers.contains(hostPort);
+ }
+
+ /**
+ * Get list of servers.
+ *
+ * @return
+ */
+  public Set<HostAndPort> getServers() {
+ return servers;
+ }
+
+ /**
+ * Remove a server from this group.
+ *
+ * @param hostPort
+ */
+ public boolean removeServer(HostAndPort hostPort) {
+ return servers.remove(hostPort);
+ }
+
+ /**
+ * Set of tables that are members of this group
+ * @return
+ */
+  public NavigableSet<TableName> getTables() {
+ return tables;
+ }
+
+ public void addTable(TableName table) {
+ tables.add(table);
+ }
+
+  public void addAllTables(Collection<TableName> arg) {
+ tables.addAll(arg);
+ }
+
+ public boolean containsTable(TableName table) {
+ return tables.contains(table);
+ }
+
+ public boolean removeTable(TableName table) {
+ return tables.remove(table);
+ }
+
+ @Override
+ public String toString() {
+ StringBuffer sb = new StringBuffer();
+ sb.append("GroupName:");
+ sb.append(this.name);
+ sb.append(", ");
+ sb.append(" Servers:");
+ sb.append(this.servers);
+ return sb.toString();
+
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ GroupInfo groupInfo = (GroupInfo) o;
+
+ if (!name.equals(groupInfo.name)) return false;
+ if (!servers.equals(groupInfo.servers)) return false;
+ if (!tables.equals(groupInfo.tables)) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = servers.hashCode();
+ result = 31 * result + tables.hashCode();
+ result = 31 * result + name.hashCode();
+ return result;
+ }
+
+}
diff --git hbase-it/src/test/java/org/apache/hadoop/hbase/group/IntegrationTestGroup.java hbase-it/src/test/java/org/apache/hadoop/hbase/group/IntegrationTestGroup.java
new file mode 100644
index 0000000..e8ee9aa
--- /dev/null
+++ hbase-it/src/test/java/org/apache/hadoop/hbase/group/IntegrationTestGroup.java
@@ -0,0 +1,89 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.group;
+
+import com.google.common.collect.Sets;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.testclassification.IntegrationTests;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Runs all of the units tests defined in TestGroupBase
+ * as an integration test.
+ * Requires TestGroupBase.NUM_SLAVE_BASE servers to run.
+ */
+@Category(IntegrationTests.class)
+public class IntegrationTestGroup extends TestGroupsBase {
+ //Integration specific
+ private final static Log LOG = LogFactory.getLog(IntegrationTestGroup.class);
+ private static boolean initialized = false;
+
+ @Before
+ public void beforeMethod() throws Exception {
+ if(!initialized) {
+ LOG.info("Setting up IntegrationTestGroup");
+ LOG.info("Initializing cluster with " + NUM_SLAVES_BASE + " servers");
+ TEST_UTIL = new IntegrationTestingUtility();
+ ((IntegrationTestingUtility)TEST_UTIL).initializeCluster(NUM_SLAVES_BASE);
+ //set shared configs
+ admin = TEST_UTIL.getHBaseAdmin();
+ cluster = TEST_UTIL.getHBaseClusterInterface();
+ groupAdmin = new VerifyingGroupAdminClient(GroupAdmin.newClient(TEST_UTIL.getConnection()),
+ TEST_UTIL.getConfiguration());
+ LOG.info("Done initializing cluster");
+ initialized = true;
+ //cluster may not be clean
+ //cleanup when initializing
+ afterMethod();
+ }
+ }
+
+ @After
+ public void afterMethod() throws Exception {
+ LOG.info("Cleaning up previous test run");
+ //cleanup previous artifacts
+ deleteTableIfNecessary();
+ deleteNamespaceIfNecessary();
+ deleteGroups();
+ admin.setBalancerRunning(false,true);
+
+ LOG.info("Restoring the cluster");
+ ((IntegrationTestingUtility)TEST_UTIL).restoreCluster();
+ LOG.info("Done restoring the cluster");
+
+    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Boolean>() {
+ @Override
+ public boolean evaluate() throws Exception {
+ LOG.info("Waiting for cleanup to finish "+groupAdmin.listGroups());
+ //Might be greater since moving servers back to default
+ //is after starting a server
+ return groupAdmin.getGroupInfo(GroupInfo.DEFAULT_GROUP).getServers().size()
+ == NUM_SLAVES_BASE;
+ }
+ });
+ LOG.info("Done cleaning up previous test run");
+ }
+}
\ No newline at end of file
diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupAdminProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupAdminProtos.java
new file mode 100644
index 0000000..282ff46
--- /dev/null
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupAdminProtos.java
@@ -0,0 +1,11852 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: GroupAdmin.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class GroupAdminProtos {
+ private GroupAdminProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ // NOTE(review): protoc-generated code (file header: "DO NOT EDIT"). Any
+ // change here must come from editing GroupAdmin.proto and regenerating,
+ // never from hand edits.
+ public interface ListTablesOfGroupRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string group_name = 1;
+ /**
+ * required string group_name = 1;
+ */
+ boolean hasGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ java.lang.String getGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ com.google.protobuf.ByteString
+ getGroupNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListTablesOfGroupRequest}
+ */
+ public static final class ListTablesOfGroupRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ListTablesOfGroupRequestOrBuilder {
+ // Use ListTablesOfGroupRequest.newBuilder() to construct.
+ private ListTablesOfGroupRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListTablesOfGroupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListTablesOfGroupRequest defaultInstance;
+ public static ListTablesOfGroupRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListTablesOfGroupRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListTablesOfGroupRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ groupName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public ListTablesOfGroupRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListTablesOfGroupRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string group_name = 1;
+ public static final int GROUP_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object groupName_;
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ groupName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasGroupName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getGroupNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getGroupNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest) obj;
+
+ boolean result = true;
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListTablesOfGroupRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.groupName_ = groupName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest.getDefaultInstance()) return this;
+ if (other.hasGroupName()) {
+ bitField0_ |= 0x00000001;
+ groupName_ = other.groupName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasGroupName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string group_name = 1;
+ private java.lang.Object groupName_ = "";
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListTablesOfGroupRequest)
+ }
+
+ static {
+ defaultInstance = new ListTablesOfGroupRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListTablesOfGroupRequest)
+ }
+
+ // NOTE(review): protoc-generated code (file header: "DO NOT EDIT") — change
+ // GroupAdmin.proto and regenerate rather than hand-editing. The raw
+ // java.util.List return types below suggest the generic type arguments
+ // (e.g. List of TableName) were lost when this patch was extracted; verify
+ // the committed file compiles as emitted by protoc.
+ public interface ListTablesOfGroupResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.TableName table_name = 1;
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ java.util.List
+ getTableNameList();
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index);
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ int getTableNameCount();
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameOrBuilderList();
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListTablesOfGroupResponse}
+ */
+ public static final class ListTablesOfGroupResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ListTablesOfGroupResponseOrBuilder {
+ // Use ListTablesOfGroupResponse.newBuilder() to construct.
+ private ListTablesOfGroupResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListTablesOfGroupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListTablesOfGroupResponse defaultInstance;
+ public static ListTablesOfGroupResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListTablesOfGroupResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListTablesOfGroupResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ tableName_ = new java.util.ArrayList();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ tableName_ = java.util.Collections.unmodifiableList(tableName_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public ListTablesOfGroupResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListTablesOfGroupResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ // repeated .hbase.pb.TableName table_name = 1;
+ public static final int TABLE_NAME_FIELD_NUMBER = 1;
+ private java.util.List tableName_;
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public java.util.List getTableNameList() {
+ return tableName_;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameOrBuilderList() {
+ return tableName_;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public int getTableNameCount() {
+ return tableName_.size();
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ return tableName_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ int index) {
+ return tableName_.get(index);
+ }
+
+ private void initFields() {
+ tableName_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getTableNameCount(); i++) {
+ if (!getTableName(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < tableName_.size(); i++) {
+ output.writeMessage(1, tableName_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < tableName_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, tableName_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse) obj;
+
+ boolean result = true;
+ result = result && getTableNameList()
+ .equals(other.getTableNameList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getTableNameCount() > 0) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableNameList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListTablesOfGroupResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getTableNameFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (tableNameBuilder_ == null) {
+ tableName_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ tableNameBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ tableName_ = java.util.Collections.unmodifiableList(tableName_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse.getDefaultInstance()) return this;
+ if (tableNameBuilder_ == null) {
+ if (!other.tableName_.isEmpty()) {
+ if (tableName_.isEmpty()) {
+ tableName_ = other.tableName_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureTableNameIsMutable();
+ tableName_.addAll(other.tableName_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.tableName_.isEmpty()) {
+ if (tableNameBuilder_.isEmpty()) {
+ tableNameBuilder_.dispose();
+ tableNameBuilder_ = null;
+ tableName_ = other.tableName_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ tableNameBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getTableNameFieldBuilder() : null;
+ } else {
+ tableNameBuilder_.addAllMessages(other.tableName_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getTableNameCount(); i++) {
+ if (!getTableName(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListTablesOfGroupResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .hbase.pb.TableName table_name = 1;
+ private java.util.List tableName_ =
+ java.util.Collections.emptyList();
+ private void ensureTableNameIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ tableName_ = new java.util.ArrayList(tableName_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public java.util.List getTableNameList() {
+ if (tableNameBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(tableName_);
+ } else {
+ return tableNameBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public int getTableNameCount() {
+ if (tableNameBuilder_ == null) {
+ return tableName_.size();
+ } else {
+ return tableNameBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ if (tableNameBuilder_ == null) {
+ return tableName_.get(index);
+ } else {
+ return tableNameBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder setTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.set(index, value);
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder setTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.add(value);
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder addTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.add(index, value);
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder addTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.add(builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder addTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder addAllTableName(
+ java.lang.Iterable extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ super.addAll(values, tableName_);
+ onChanged();
+ } else {
+ tableNameBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public Builder removeTableName(int index) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.remove(index);
+ onChanged();
+ } else {
+ tableNameBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder(
+ int index) {
+ return getTableNameFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ int index) {
+ if (tableNameBuilder_ == null) {
+ return tableName_.get(index); } else {
+ return tableNameBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameOrBuilderList() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(tableName_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder() {
+ return getTableNameFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder(
+ int index) {
+ return getTableNameFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 1;
+ */
+ public java.util.List
+ getTableNameBuilderList() {
+ return getTableNameFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListTablesOfGroupResponse)
+ }
+
// Eagerly builds the shared default instance for ListTablesOfGroupResponse.
static {
  defaultInstance = new ListTablesOfGroupResponse(true);
  defaultInstance.initFields();
}
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListTablesOfGroupResponse)
+ }
+
+ public interface GetGroupInfoRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string group_name = 1;
+ /**
+ * required string group_name = 1;
+ */
+ boolean hasGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ java.lang.String getGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ com.google.protobuf.ByteString
+ getGroupNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoRequest}
+ */
+ public static final class GetGroupInfoRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoRequestOrBuilder {
+ // Use GetGroupInfoRequest.newBuilder() to construct.
+ private GetGroupInfoRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetGroupInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetGroupInfoRequest defaultInstance;
+ public static GetGroupInfoRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetGroupInfoRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private GetGroupInfoRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ groupName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public GetGroupInfoRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string group_name = 1;
+ public static final int GROUP_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object groupName_;
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ groupName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasGroupName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getGroupNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getGroupNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest) obj;
+
+ boolean result = true;
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.groupName_ = groupName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.getDefaultInstance()) return this;
+ if (other.hasGroupName()) {
+ bitField0_ |= 0x00000001;
+ groupName_ = other.groupName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasGroupName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string group_name = 1;
+ private java.lang.Object groupName_ = "";
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoRequest)
+ }
+
+ static {
+ defaultInstance = new GetGroupInfoRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoRequest)
+ }
+
+ public interface GetGroupInfoResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ boolean hasGroupInfo();
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo();
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoResponse}
+ */
+ public static final class GetGroupInfoResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoResponseOrBuilder {
// Use GetGroupInfoResponse.newBuilder() to construct.
// NOTE(review): the stripped wildcard type argument on Builder was restored
// ("Builder>" did not compile).
private GetGroupInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
private GetGroupInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final GetGroupInfoResponse defaultInstance;
public static GetGroupInfoResponse getDefaultInstance() {
  return defaultInstance;
}

public GetGroupInfoResponse getDefaultInstanceForType() {
  return defaultInstance;
}
+
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor used by PARSER.
private GetGroupInfoResponse(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            // Duplicate field on the wire: merge into the existing value.
            subBuilder = groupInfo_.toBuilder();
          }
          groupInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(groupInfo_);
            groupInfo_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public GetGroupInfoResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ // Presence bitmap: bit 0 set means group_info was explicitly set on the wire.
+ private int bitField0_;
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ public static final int GROUP_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ return groupInfo_;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ return groupInfo_;
+ }
+
+ // Sets every field to its proto default; called from the parsing constructor
+ // and the static initializer before any wire data is merged in.
+ private void initFields() {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ }
+ // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ // group_info is optional, but if present its own required fields must be set.
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ // getSerializedSize() is called for its memoization side effect before writing.
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, groupInfo_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // Memoized serialized size; -1 until first computed.
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, groupInfo_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Value equality: same group_info presence/value and same unknown fields.
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse) obj;
+
+ boolean result = true;
+ result = result && (hasGroupInfo() == other.hasGroupInfo());
+ if (hasGroupInfo()) {
+ result = result && getGroupInfo()
+ .equals(other.getGroupInfo());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ // Memoized hash (0 = not computed), consistent with equals() above.
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupInfo()) {
+ hash = (37 * hash) + GROUP_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupInfo().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ // Static parse helpers — all delegate to PARSER.
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ // Builder factory methods.
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getGroupInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (groupInfoBuilder_ == null) {
+ result.groupInfo_ = groupInfo_;
+ } else {
+ result.groupInfo_ = groupInfoBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance()) return this;
+ if (other.hasGroupInfo()) {
+ mergeGroupInfo(other.getGroupInfo());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder> groupInfoBuilder_;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_;
+ } else {
+ return groupInfoBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder setGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ groupInfo_ = value;
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder setGroupInfo(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder mergeGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ groupInfo_ != org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance()) {
+ groupInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.newBuilder(groupInfo_).mergeFrom(value).buildPartial();
+ } else {
+ groupInfo_ = value;
+ }
+ onChanged();
+ } else {
+ groupInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder clearGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder getGroupInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getGroupInfoFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ if (groupInfoBuilder_ != null) {
+ return groupInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return groupInfo_;
+ }
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoFieldBuilder() {
+ if (groupInfoBuilder_ == null) {
+ groupInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>(
+ groupInfo_,
+ getParentForChildren(),
+ isClean());
+ groupInfo_ = null;
+ }
+ return groupInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoResponse)
+ }
+
+ // Eagerly create the shared default instance with proto-default field values.
+ static {
+ defaultInstance = new GetGroupInfoResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoResponse)
+ }
+
+ // Read-only view shared by the GetGroupInfoOfTableRequest message and its Builder.
+ public interface GetGroupInfoOfTableRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .hbase.pb.TableName table_name = 1;
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ boolean hasTableName();
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfTableRequest}
+ */
+ public static final class GetGroupInfoOfTableRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoOfTableRequestOrBuilder {
+ // Use GetGroupInfoOfTableRequest.newBuilder() to construct.
+ // NOTE(review): the builder parameter's wildcard type argument was stripped in
+ // transit, leaving the malformed `Builder> builder`; restored to `Builder<?>`
+ // to match standard protoc 2.5 output.
+ private GetGroupInfoOfTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetGroupInfoOfTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetGroupInfoOfTableRequest defaultInstance;
+ public static GetGroupInfoOfTableRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetGroupInfoOfTableRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ // Wire-format parsing constructor: reads tags until EOF (tag 0), storing
+ // unrecognized fields in unknownFields. The mid-switch `default` before
+ // `case 10` is standard protoc 2.5 output, not an error.
+ private GetGroupInfoOfTableRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ // Field 1 (table_name): merge into any previously-seen value per
+ // proto2 last-message-merges semantics.
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ // Always freeze whatever was parsed, even on failure, so the unfinished
+ // message attached to the exception is consistent.
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ // Descriptor/reflection plumbing backed by the file-level tables in GroupAdminProtos.
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.Builder.class);
+ }
+
+ // Parser instance backing the static parseFrom()/parseDelimitedFrom() helpers.
+ // NOTE(review): generic type arguments restored (stripped in transit) to match
+ // standard protoc 2.5 output — confirm against the regenerated file.
+ public static com.google.protobuf.Parser<GetGroupInfoOfTableRequest> PARSER =
+ new com.google.protobuf.AbstractParser<GetGroupInfoOfTableRequest>() {
+ public GetGroupInfoOfTableRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoOfTableRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetGroupInfoOfTableRequest> getParserForType() {
+ return PARSER;
+ }
+
+ // Presence bitmap: bit 0 set means table_name was set on the wire.
+ private int bitField0_;
+ // required .hbase.pb.TableName table_name = 1;
+ public static final int TABLE_NAME_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ return tableName_;
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_;
+ }
+
+ // Sets fields to proto defaults; called before any wire data is merged in.
+ private void initFields() {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ }
+ // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ // table_name is required, and must itself be fully initialized.
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ // getSerializedSize() is called for its memoization side effect before writing.
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, tableName_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // Memoized serialized size; -1 until first computed.
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, tableName_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Value equality: same table_name presence/value and same unknown fields.
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest) obj;
+
+ boolean result = true;
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ // Memoized hash (0 = not computed), consistent with equals() above.
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ // Static parse helpers — all delegate to PARSER.
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ // Builder factory methods.
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfTableRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequestOrBuilder {
+ // Descriptor/reflection plumbing for the Builder.
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getTableNameFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ // Resets table_name to its default and clears its presence bit.
+ public Builder clear() {
+ super.clear();
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.getDefaultInstance();
+ }
+
+ // build() enforces required-field initialization; buildPartial() does not.
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.getDefaultInstance()) return this;
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTableName()) {
+
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ // Parses a full message from the stream and merges it; on parse failure the
+ // partially-parsed message is still merged (finally block) before rethrowing.
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .hbase.pb.TableName table_name = 1;
+ // tableName_ holds the value until a nested builder is requested; after that,
+ // tableNameBuilder_ is authoritative and tableName_ is nulled out.
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public Builder setTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ // Merge field-by-field only when a non-default value is already present;
+ // otherwise just adopt the incoming message.
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getTableNameFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_;
+ }
+ }
+ /**
+ * required .hbase.pb.TableName table_name = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoOfTableRequest)
+ }
+
+ static {
+ defaultInstance = new GetGroupInfoOfTableRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoOfTableRequest)
+ }
+
+ /**
+  * Read-only accessor contract for {@code hbase.pb.GetGroupInfoOfTableResponse}.
+  * NOTE(review): generated by the protocol buffer compiler — do not edit by
+  * hand; regenerate from the .proto definition instead.
+  */
+ public interface GetGroupInfoOfTableResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  * @return whether the group_info field has been explicitly set
+  */
+ boolean hasGroupInfo();
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  * @return the group info; the type's default instance when unset
+  */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo();
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder();
+ }
+ /**
+  * Protobuf type {@code hbase.pb.GetGroupInfoOfTableResponse}
+  *
+  * <p>Generated message carrying the optional {@code GroupInfo} a table belongs to.
+  * NOTE(review): generic type parameters on {@code GeneratedMessage.Builder},
+  * {@code Parser} and {@code AbstractParser} were restored where they had been
+  * stripped; the raw-type form does not compile cleanly.</p>
+  */
+ public static final class GetGroupInfoOfTableResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoOfTableResponseOrBuilder {
+ // Use GetGroupInfoOfTableResponse.newBuilder() to construct.
+ private GetGroupInfoOfTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetGroupInfoOfTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetGroupInfoOfTableResponse defaultInstance;
+ public static GetGroupInfoOfTableResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetGroupInfoOfTableResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private GetGroupInfoOfTableResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = groupInfo_.toBuilder();
+ }
+ groupInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(groupInfo_);
+ groupInfo_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<GetGroupInfoOfTableResponse> PARSER =
+ new com.google.protobuf.AbstractParser<GetGroupInfoOfTableResponse>() {
+ public GetGroupInfoOfTableResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoOfTableResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetGroupInfoOfTableResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ public static final int GROUP_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_;
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ return groupInfo_;
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ return groupInfo_;
+ }
+
+ private void initFields() {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, groupInfo_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, groupInfo_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse) obj;
+
+ boolean result = true;
+ result = result && (hasGroupInfo() == other.hasGroupInfo());
+ if (hasGroupInfo()) {
+ result = result && getGroupInfo()
+ .equals(other.getGroupInfo());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupInfo()) {
+ hash = (37 * hash) + GROUP_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupInfo().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+  * Protobuf type {@code hbase.pb.GetGroupInfoOfTableResponse}
+  */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getGroupInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (groupInfoBuilder_ == null) {
+ result.groupInfo_ = groupInfo_;
+ } else {
+ result.groupInfo_ = groupInfoBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance()) return this;
+ if (other.hasGroupInfo()) {
+ mergeGroupInfo(other.getGroupInfo());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder> groupInfoBuilder_;
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_;
+ } else {
+ return groupInfoBuilder_.getMessage();
+ }
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public Builder setGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ groupInfo_ = value;
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public Builder setGroupInfo(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public Builder mergeGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ groupInfo_ != org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance()) {
+ groupInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.newBuilder(groupInfo_).mergeFrom(value).buildPartial();
+ } else {
+ groupInfo_ = value;
+ }
+ onChanged();
+ } else {
+ groupInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public Builder clearGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder getGroupInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getGroupInfoFieldBuilder().getBuilder();
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ if (groupInfoBuilder_ != null) {
+ return groupInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return groupInfo_;
+ }
+ }
+ /**
+  * optional .hbase.pb.GroupInfo group_info = 1;
+  */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoFieldBuilder() {
+ if (groupInfoBuilder_ == null) {
+ groupInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>(
+ groupInfo_,
+ getParentForChildren(),
+ isClean());
+ groupInfo_ = null;
+ }
+ return groupInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoOfTableResponse)
+ }
+
+ static {
+ defaultInstance = new GetGroupInfoOfTableResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoOfTableResponse)
+ }
+
+ /**
+  * Read-only accessor contract for {@code hbase.pb.MoveServersRequest}:
+  * a target group name plus the servers to move into it.
+  * NOTE(review): restored the stripped generic type arguments on the
+  * {@code java.util.List} return types; the raw-type form loses the
+  * element type and triggers unchecked warnings at call sites.
+  */
+ public interface MoveServersRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string target_group = 1;
+ /**
+  * required string target_group = 1;
+  */
+ boolean hasTargetGroup();
+ /**
+  * required string target_group = 1;
+  */
+ java.lang.String getTargetGroup();
+ /**
+  * required string target_group = 1;
+  */
+ com.google.protobuf.ByteString
+ getTargetGroupBytes();
+
+ // repeated .hbase.pb.HostPort servers = 2;
+ /**
+  * repeated .hbase.pb.HostPort servers = 2;
+  */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort>
+ getServersList();
+ /**
+  * repeated .hbase.pb.HostPort servers = 2;
+  */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index);
+ /**
+  * repeated .hbase.pb.HostPort servers = 2;
+  */
+ int getServersCount();
+ /**
+  * repeated .hbase.pb.HostPort servers = 2;
+  */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList();
+ /**
+  * repeated .hbase.pb.HostPort servers = 2;
+  */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.MoveServersRequest}
+ */
+ public static final class MoveServersRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements MoveServersRequestOrBuilder {
+ // Use MoveServersRequest.newBuilder() to construct.
+ private MoveServersRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private MoveServersRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final MoveServersRequest defaultInstance;
+ public static MoveServersRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public MoveServersRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private MoveServersRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ targetGroup_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = new java.util.ArrayList();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ servers_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = java.util.Collections.unmodifiableList(servers_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public MoveServersRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new MoveServersRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string target_group = 1;
+ public static final int TARGET_GROUP_FIELD_NUMBER = 1;
+ private java.lang.Object targetGroup_;
+ /**
+ * required string target_group = 1;
+ */
+ public boolean hasTargetGroup() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public java.lang.String getTargetGroup() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ targetGroup_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public com.google.protobuf.ByteString
+ getTargetGroupBytes() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ targetGroup_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // repeated .hbase.pb.HostPort servers = 2;
+ public static final int SERVERS_FIELD_NUMBER = 2;
+ private java.util.List servers_;
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public java.util.List getServersList() {
+ return servers_;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList() {
+ return servers_;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public int getServersCount() {
+ return servers_.size();
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index) {
+ return servers_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index) {
+ return servers_.get(index);
+ }
+
+ private void initFields() {
+ targetGroup_ = "";
+ servers_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasTargetGroup()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ for (int i = 0; i < getServersCount(); i++) {
+ if (!getServers(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTargetGroupBytes());
+ }
+ for (int i = 0; i < servers_.size(); i++) {
+ output.writeMessage(2, servers_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTargetGroupBytes());
+ }
+ for (int i = 0; i < servers_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, servers_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest) obj;
+
+ boolean result = true;
+ result = result && (hasTargetGroup() == other.hasTargetGroup());
+ if (hasTargetGroup()) {
+ result = result && getTargetGroup()
+ .equals(other.getTargetGroup());
+ }
+ result = result && getServersList()
+ .equals(other.getServersList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+    // Cached hash; 0 means "not computed yet" (a computed hash of 0 is simply recomputed).
+    private int memoizedHashCode = 0;
+    /** Hash consistent with equals(): mixes descriptor, set fields by field number, and unknown fields. */
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasTargetGroup()) {
+        hash = (37 * hash) + TARGET_GROUP_FIELD_NUMBER;
+        hash = (53 * hash) + getTargetGroup().hashCode();
+      }
+      if (getServersCount() > 0) {
+        hash = (37 * hash) + SERVERS_FIELD_NUMBER;
+        hash = (53 * hash) + getServersList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    // Static parse entry points; all delegate to the message's PARSER instance.
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    // Builder factories: fresh builder, builder seeded from a prototype, and the
+    // parent-aware variant used internally by nested-builder plumbing.
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+ /**
+ * Protobuf type {@code hbase.pb.MoveServersRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getServersFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ targetGroup_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (serversBuilder_ == null) {
+ servers_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ serversBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.targetGroup_ = targetGroup_;
+ if (serversBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = java.util.Collections.unmodifiableList(servers_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.servers_ = servers_;
+ } else {
+ result.servers_ = serversBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.getDefaultInstance()) return this;
+ if (other.hasTargetGroup()) {
+ bitField0_ |= 0x00000001;
+ targetGroup_ = other.targetGroup_;
+ onChanged();
+ }
+ if (serversBuilder_ == null) {
+ if (!other.servers_.isEmpty()) {
+ if (servers_.isEmpty()) {
+ servers_ = other.servers_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureServersIsMutable();
+ servers_.addAll(other.servers_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.servers_.isEmpty()) {
+ if (serversBuilder_.isEmpty()) {
+ serversBuilder_.dispose();
+ serversBuilder_ = null;
+ servers_ = other.servers_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ serversBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getServersFieldBuilder() : null;
+ } else {
+ serversBuilder_.addAllMessages(other.servers_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTargetGroup()) {
+
+ return false;
+ }
+ for (int i = 0; i < getServersCount(); i++) {
+ if (!getServers(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string target_group = 1;
+ private java.lang.Object targetGroup_ = "";
+ /**
+ * required string target_group = 1;
+ */
+ public boolean hasTargetGroup() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public java.lang.String getTargetGroup() {
+ java.lang.Object ref = targetGroup_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ targetGroup_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public com.google.protobuf.ByteString
+ getTargetGroupBytes() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ targetGroup_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder setTargetGroup(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ targetGroup_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder clearTargetGroup() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ targetGroup_ = getDefaultInstance().getTargetGroup();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder setTargetGroupBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ targetGroup_ = value;
+ onChanged();
+ return this;
+ }
+
+ // repeated .hbase.pb.HostPort servers = 2;
+ private java.util.List servers_ =
+ java.util.Collections.emptyList();
+ private void ensureServersIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = new java.util.ArrayList(servers_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder> serversBuilder_;
+
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public java.util.List getServersList() {
+ if (serversBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(servers_);
+ } else {
+ return serversBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public int getServersCount() {
+ if (serversBuilder_ == null) {
+ return servers_.size();
+ } else {
+ return serversBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index) {
+ if (serversBuilder_ == null) {
+ return servers_.get(index);
+ } else {
+ return serversBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder setServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.set(index, value);
+ onChanged();
+ } else {
+ serversBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder setServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder addServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.add(value);
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder addServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.add(index, value);
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder addServers(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.add(builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder addServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder addAllServers(
+ java.lang.Iterable extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort> values) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ super.addAll(values, servers_);
+ onChanged();
+ } else {
+ serversBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder clearServers() {
+ if (serversBuilder_ == null) {
+ servers_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ serversBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public Builder removeServers(int index) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.remove(index);
+ onChanged();
+ } else {
+ serversBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder getServersBuilder(
+ int index) {
+ return getServersFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index) {
+ if (serversBuilder_ == null) {
+ return servers_.get(index); } else {
+ return serversBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList() {
+ if (serversBuilder_ != null) {
+ return serversBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(servers_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder addServersBuilder() {
+ return getServersFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder addServersBuilder(
+ int index) {
+ return getServersFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 2;
+ */
+ public java.util.List
+ getServersBuilderList() {
+ return getServersFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersFieldBuilder() {
+ if (serversBuilder_ == null) {
+ serversBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>(
+ servers_,
+ ((bitField0_ & 0x00000002) == 0x00000002),
+ getParentForChildren(),
+ isClean());
+ servers_ = null;
+ }
+ return serversBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.MoveServersRequest)
+ }
+
+    static {
+      // Eagerly build the singleton returned by getDefaultInstance().
+      defaultInstance = new MoveServersRequest(true);
+      defaultInstance.initFields();
+    }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.MoveServersRequest)
+ }
+
+  /** Or-builder view for {@code hbase.pb.MoveServersResponse}; the message declares no fields. */
+  public interface MoveServersResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hbase.pb.MoveServersResponse}
+   *
+   * NOTE(review): protoc-generated empty-response message; the generic type
+   * parameters below were restored after being stripped in transit.
+   * Do not hand-edit further — regenerate from the .proto.
+   */
+  public static final class MoveServersResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements MoveServersResponseOrBuilder {
+    // Use MoveServersResponse.newBuilder() to construct.
+    private MoveServersResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private MoveServersResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final MoveServersResponse defaultInstance;
+    public static MoveServersResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public MoveServersResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private MoveServersResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<MoveServersResponse> PARSER =
+        new com.google.protobuf.AbstractParser<MoveServersResponse>() {
+      public MoveServersResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new MoveServersResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<MoveServersResponse> getParserForType() {
+      return PARSER;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.MoveServersResponse}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveServersResponse_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.MoveServersResponse)
+    }
+
+    static {
+      defaultInstance = new MoveServersResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.MoveServersResponse)
+  }
+
+  /**
+   * Or-builder view for {@code hbase.pb.MoveTablesRequest}.
+   *
+   * NOTE(review): generic type parameters on the list accessors were restored
+   * after being stripped in transit; regenerate from the .proto rather than
+   * hand-editing further.
+   */
+  public interface MoveTablesRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string target_group = 1;
+    /**
+     * <code>required string target_group = 1;</code>
+     */
+    boolean hasTargetGroup();
+    /**
+     * <code>required string target_group = 1;</code>
+     */
+    java.lang.String getTargetGroup();
+    /**
+     * <code>required string target_group = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getTargetGroupBytes();
+
+    // repeated .hbase.pb.TableName table_name = 2;
+    /**
+     * <code>repeated .hbase.pb.TableName table_name = 2;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>
+        getTableNameList();
+    /**
+     * <code>repeated .hbase.pb.TableName table_name = 2;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index);
+    /**
+     * <code>repeated .hbase.pb.TableName table_name = 2;</code>
+     */
+    int getTableNameCount();
+    /**
+     * <code>repeated .hbase.pb.TableName table_name = 2;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+        getTableNameOrBuilderList();
+    /**
+     * <code>repeated .hbase.pb.TableName table_name = 2;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+        int index);
+  }
+ /**
+ * Protobuf type {@code hbase.pb.MoveTablesRequest}
+ */
+ public static final class MoveTablesRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements MoveTablesRequestOrBuilder {
+ // Use MoveTablesRequest.newBuilder() to construct.
+ private MoveTablesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private MoveTablesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final MoveTablesRequest defaultInstance;
+ public static MoveTablesRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public MoveTablesRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private MoveTablesRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ targetGroup_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ tableName_ = java.util.Collections.unmodifiableList(tableName_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<MoveTablesRequest> PARSER =
+ new com.google.protobuf.AbstractParser<MoveTablesRequest>() {
+ public MoveTablesRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new MoveTablesRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<MoveTablesRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string target_group = 1;
+ public static final int TARGET_GROUP_FIELD_NUMBER = 1;
+ private java.lang.Object targetGroup_;
+ /**
+ * required string target_group = 1;
+ */
+ public boolean hasTargetGroup() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public java.lang.String getTargetGroup() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ targetGroup_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public com.google.protobuf.ByteString
+ getTargetGroupBytes() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ targetGroup_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // repeated .hbase.pb.TableName table_name = 2;
+ public static final int TABLE_NAME_FIELD_NUMBER = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_;
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
+ return tableName_;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameOrBuilderList() {
+ return tableName_;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public int getTableNameCount() {
+ return tableName_.size();
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ return tableName_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ int index) {
+ return tableName_.get(index);
+ }
+
+ private void initFields() {
+ targetGroup_ = "";
+ tableName_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasTargetGroup()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ for (int i = 0; i < getTableNameCount(); i++) {
+ if (!getTableName(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTargetGroupBytes());
+ }
+ for (int i = 0; i < tableName_.size(); i++) {
+ output.writeMessage(2, tableName_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTargetGroupBytes());
+ }
+ for (int i = 0; i < tableName_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, tableName_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest) obj;
+
+ boolean result = true;
+ result = result && (hasTargetGroup() == other.hasTargetGroup());
+ if (hasTargetGroup()) {
+ result = result && getTargetGroup()
+ .equals(other.getTargetGroup());
+ }
+ result = result && getTableNameList()
+ .equals(other.getTableNameList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasTargetGroup()) {
+ hash = (37 * hash) + TARGET_GROUP_FIELD_NUMBER;
+ hash = (53 * hash) + getTargetGroup().hashCode();
+ }
+ if (getTableNameCount() > 0) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableNameList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.MoveTablesRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getTableNameFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ targetGroup_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (tableNameBuilder_ == null) {
+ tableName_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ tableNameBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.targetGroup_ = targetGroup_;
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ tableName_ = java.util.Collections.unmodifiableList(tableName_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.getDefaultInstance()) return this;
+ if (other.hasTargetGroup()) {
+ bitField0_ |= 0x00000001;
+ targetGroup_ = other.targetGroup_;
+ onChanged();
+ }
+ if (tableNameBuilder_ == null) {
+ if (!other.tableName_.isEmpty()) {
+ if (tableName_.isEmpty()) {
+ tableName_ = other.tableName_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureTableNameIsMutable();
+ tableName_.addAll(other.tableName_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.tableName_.isEmpty()) {
+ if (tableNameBuilder_.isEmpty()) {
+ tableNameBuilder_.dispose();
+ tableNameBuilder_ = null;
+ tableName_ = other.tableName_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ tableNameBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getTableNameFieldBuilder() : null;
+ } else {
+ tableNameBuilder_.addAllMessages(other.tableName_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTargetGroup()) {
+
+ return false;
+ }
+ for (int i = 0; i < getTableNameCount(); i++) {
+ if (!getTableName(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string target_group = 1;
+ private java.lang.Object targetGroup_ = "";
+ /**
+ * required string target_group = 1;
+ */
+ public boolean hasTargetGroup() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public java.lang.String getTargetGroup() {
+ java.lang.Object ref = targetGroup_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ targetGroup_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public com.google.protobuf.ByteString
+ getTargetGroupBytes() {
+ java.lang.Object ref = targetGroup_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ targetGroup_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder setTargetGroup(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ targetGroup_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder clearTargetGroup() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ targetGroup_ = getDefaultInstance().getTargetGroup();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string target_group = 1;
+ */
+ public Builder setTargetGroupBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ targetGroup_ = value;
+ onChanged();
+ return this;
+ }
+
+ // repeated .hbase.pb.TableName table_name = 2;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_ =
+ java.util.Collections.emptyList();
+ private void ensureTableNameIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableName_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
+ if (tableNameBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(tableName_);
+ } else {
+ return tableNameBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public int getTableNameCount() {
+ if (tableNameBuilder_ == null) {
+ return tableName_.size();
+ } else {
+ return tableNameBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
+ if (tableNameBuilder_ == null) {
+ return tableName_.get(index);
+ } else {
+ return tableNameBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder setTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.set(index, value);
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder setTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.add(value);
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder addTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTableNameIsMutable();
+ tableName_.add(index, value);
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder addTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.add(builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder addTableName(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ tableNameBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder addAllTableName(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ super.addAll(values, tableName_);
+ onChanged();
+ } else {
+ tableNameBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public Builder removeTableName(int index) {
+ if (tableNameBuilder_ == null) {
+ ensureTableNameIsMutable();
+ tableName_.remove(index);
+ onChanged();
+ } else {
+ tableNameBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder(
+ int index) {
+ return getTableNameFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
+ int index) {
+ if (tableNameBuilder_ == null) {
+ return tableName_.get(index); } else {
+ return tableNameBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameOrBuilderList() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(tableName_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder() {
+ return getTableNameFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder(
+ int index) {
+ return getTableNameFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName table_name = 2;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder>
+ getTableNameBuilderList() {
+ return getTableNameFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ ((bitField0_ & 0x00000002) == 0x00000002),
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.MoveTablesRequest)
+ }
+
+ static {
+ defaultInstance = new MoveTablesRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.MoveTablesRequest)
+ }
+
+ public interface MoveTablesResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.MoveTablesResponse}
+ */
+ public static final class MoveTablesResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements MoveTablesResponseOrBuilder {
+ // Use MoveTablesResponse.newBuilder() to construct.
+ private MoveTablesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private MoveTablesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final MoveTablesResponse defaultInstance;
+ public static MoveTablesResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public MoveTablesResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private MoveTablesResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<MoveTablesResponse> PARSER =
+ new com.google.protobuf.AbstractParser<MoveTablesResponse>() {
+ public MoveTablesResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new MoveTablesResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<MoveTablesResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.MoveTablesResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_MoveTablesResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.MoveTablesResponse)
+ }
+
+ static {
+ defaultInstance = new MoveTablesResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.MoveTablesResponse)
+ }
+
+ public interface AddGroupRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string group_name = 1;
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ boolean hasGroupName();
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ java.lang.String getGroupName();
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getGroupNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AddGroupRequest}
+ *
+ * <p>NOTE(review): protoc-generated code. Generic type arguments that were
+ * stripped by markup processing (e.g. {@code Parser<AddGroupRequest>},
+ * {@code Builder<?>}) have been restored; prefer regenerating from the
+ * .proto file over hand-editing this class.
+ */
+ public static final class AddGroupRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements AddGroupRequestOrBuilder {
+ // Use AddGroupRequest.newBuilder() to construct.
+ private AddGroupRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AddGroupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AddGroupRequest defaultInstance;
+ public static AddGroupRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AddGroupRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AddGroupRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ groupName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AddGroupRequest> PARSER =
+ new com.google.protobuf.AbstractParser<AddGroupRequest>() {
+ public AddGroupRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AddGroupRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AddGroupRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string group_name = 1;
+ public static final int GROUP_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object groupName_;
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ groupName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasGroupName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getGroupNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getGroupNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest) obj;
+
+ boolean result = true;
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AddGroupRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.groupName_ = groupName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.getDefaultInstance()) return this;
+ if (other.hasGroupName()) {
+ bitField0_ |= 0x00000001;
+ groupName_ = other.groupName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasGroupName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string group_name = 1;
+ private java.lang.Object groupName_ = "";
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public Builder setGroupName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string group_name = 1;</code>
+ */
+ public Builder setGroupNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.AddGroupRequest)
+ }
+
+ static {
+ defaultInstance = new AddGroupRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.AddGroupRequest)
+ }
+
+ // Marker OrBuilder interface for the field-less AddGroupResponse message
+ // (protoc-generated; intentionally empty because the message has no fields).
+ public interface AddGroupResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AddGroupResponse}
+ */
+ public static final class AddGroupResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements AddGroupResponseOrBuilder {
+ // Use AddGroupResponse.newBuilder() to construct.
+ private AddGroupResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AddGroupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AddGroupResponse defaultInstance;
+ public static AddGroupResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AddGroupResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AddGroupResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public AddGroupResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AddGroupResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AddGroupResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_AddGroupResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.AddGroupResponse)
+ }
+
+ static {
+ defaultInstance = new AddGroupResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.AddGroupResponse)
+ }
+
+ public interface RemoveGroupRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string group_name = 1;
+ /**
+ * required string group_name = 1;
+ */
+ boolean hasGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ java.lang.String getGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ com.google.protobuf.ByteString
+ getGroupNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.RemoveGroupRequest}
+ */
+ public static final class RemoveGroupRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements RemoveGroupRequestOrBuilder {
+ // Use RemoveGroupRequest.newBuilder() to construct.
+ private RemoveGroupRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private RemoveGroupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RemoveGroupRequest defaultInstance;
+ public static RemoveGroupRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RemoveGroupRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private RemoveGroupRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ groupName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public RemoveGroupRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RemoveGroupRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string group_name = 1;
+ public static final int GROUP_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object groupName_;
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ groupName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasGroupName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getGroupNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getGroupNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest) obj;
+
+ boolean result = true;
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.RemoveGroupRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.groupName_ = groupName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.getDefaultInstance()) return this;
+ if (other.hasGroupName()) {
+ bitField0_ |= 0x00000001;
+ groupName_ = other.groupName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasGroupName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string group_name = 1;
+ private java.lang.Object groupName_ = "";
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.RemoveGroupRequest)
+ }
+
+ static {
+ defaultInstance = new RemoveGroupRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.RemoveGroupRequest)
+ }
+
+ public interface RemoveGroupResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.RemoveGroupResponse}
+ */
+ public static final class RemoveGroupResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements RemoveGroupResponseOrBuilder {
+ // Use RemoveGroupResponse.newBuilder() to construct.
+ private RemoveGroupResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private RemoveGroupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RemoveGroupResponse defaultInstance;
+ public static RemoveGroupResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RemoveGroupResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private RemoveGroupResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public RemoveGroupResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RemoveGroupResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.RemoveGroupResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_RemoveGroupResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.RemoveGroupResponse)
+ }
+
+ static {
+ defaultInstance = new RemoveGroupResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.RemoveGroupResponse)
+ }
+
+ public interface BalanceGroupRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string group_name = 1;
+ /**
+ * required string group_name = 1;
+ */
+ boolean hasGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ java.lang.String getGroupName();
+ /**
+ * required string group_name = 1;
+ */
+ com.google.protobuf.ByteString
+ getGroupNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BalanceGroupRequest}
+ */
+ public static final class BalanceGroupRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements BalanceGroupRequestOrBuilder {
+ // Use BalanceGroupRequest.newBuilder() to construct.
+ private BalanceGroupRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BalanceGroupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BalanceGroupRequest defaultInstance;
+ public static BalanceGroupRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BalanceGroupRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BalanceGroupRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ groupName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BalanceGroupRequest> PARSER =
+ new com.google.protobuf.AbstractParser<BalanceGroupRequest>() {
+ public BalanceGroupRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BalanceGroupRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BalanceGroupRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string group_name = 1;
+ public static final int GROUP_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object groupName_;
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ groupName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ groupName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasGroupName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getGroupNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getGroupNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest) obj;
+
+ boolean result = true;
+ result = result && (hasGroupName() == other.hasGroupName());
+ if (hasGroupName()) {
+ result = result && getGroupName()
+ .equals(other.getGroupName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupName()) {
+ hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BalanceGroupRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ groupName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.groupName_ = groupName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.getDefaultInstance()) return this;
+ if (other.hasGroupName()) {
+ bitField0_ |= 0x00000001;
+ groupName_ = other.groupName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasGroupName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string group_name = 1;
+ private java.lang.Object groupName_ = "";
+ /**
+ * required string group_name = 1;
+ */
+ public boolean hasGroupName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public java.lang.String getGroupName() {
+ java.lang.Object ref = groupName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ groupName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getGroupNameBytes() {
+ java.lang.Object ref = groupName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ groupName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder clearGroupName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupName_ = getDefaultInstance().getGroupName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string group_name = 1;
+ */
+ public Builder setGroupNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ groupName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BalanceGroupRequest)
+ }
+
+ static {
+ defaultInstance = new BalanceGroupRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BalanceGroupRequest)
+ }
+
+ public interface BalanceGroupResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bool balanceRan = 1;
+ /**
+ * required bool balanceRan = 1;
+ */
+ boolean hasBalanceRan();
+ /**
+ * required bool balanceRan = 1;
+ */
+ boolean getBalanceRan();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BalanceGroupResponse}
+ */
+ public static final class BalanceGroupResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements BalanceGroupResponseOrBuilder {
+ // Use BalanceGroupResponse.newBuilder() to construct.
+ private BalanceGroupResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BalanceGroupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BalanceGroupResponse defaultInstance;
+ public static BalanceGroupResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BalanceGroupResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BalanceGroupResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ balanceRan_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BalanceGroupResponse> PARSER =
+ new com.google.protobuf.AbstractParser<BalanceGroupResponse>() {
+ public BalanceGroupResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BalanceGroupResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BalanceGroupResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bool balanceRan = 1;
+ public static final int BALANCERAN_FIELD_NUMBER = 1;
+ private boolean balanceRan_;
+ /**
+ * required bool balanceRan = 1;
+ */
+ public boolean hasBalanceRan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bool balanceRan = 1;
+ */
+ public boolean getBalanceRan() {
+ return balanceRan_;
+ }
+
+ private void initFields() {
+ balanceRan_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasBalanceRan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, balanceRan_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, balanceRan_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse) obj;
+
+ boolean result = true;
+ result = result && (hasBalanceRan() == other.hasBalanceRan());
+ if (hasBalanceRan()) {
+ result = result && (getBalanceRan()
+ == other.getBalanceRan());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasBalanceRan()) {
+ hash = (37 * hash) + BALANCERAN_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getBalanceRan());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BalanceGroupResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ balanceRan_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_BalanceGroupResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.balanceRan_ = balanceRan_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance()) return this;
+ if (other.hasBalanceRan()) {
+ setBalanceRan(other.getBalanceRan());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasBalanceRan()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required bool balanceRan = 1;
+ private boolean balanceRan_ ;
+ /**
+ * required bool balanceRan = 1;
+ */
+ public boolean hasBalanceRan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bool balanceRan = 1;
+ */
+ public boolean getBalanceRan() {
+ return balanceRan_;
+ }
+ /**
+ * required bool balanceRan = 1;
+ */
+ public Builder setBalanceRan(boolean value) {
+ bitField0_ |= 0x00000001;
+ balanceRan_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bool balanceRan = 1;
+ */
+ public Builder clearBalanceRan() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ balanceRan_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BalanceGroupResponse)
+ }
+
+ static {
+ defaultInstance = new BalanceGroupResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BalanceGroupResponse)
+ }
+
+ public interface ListGroupInfosRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListGroupInfosRequest}
+ */
+ public static final class ListGroupInfosRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ListGroupInfosRequestOrBuilder {
+ // Use ListGroupInfosRequest.newBuilder() to construct.
+ private ListGroupInfosRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListGroupInfosRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListGroupInfosRequest defaultInstance;
+ public static ListGroupInfosRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListGroupInfosRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListGroupInfosRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ListGroupInfosRequest> PARSER =
+ new com.google.protobuf.AbstractParser<ListGroupInfosRequest>() {
+ public ListGroupInfosRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListGroupInfosRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListGroupInfosRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListGroupInfosRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListGroupInfosRequest)
+ }
+
+ static {
+ defaultInstance = new ListGroupInfosRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListGroupInfosRequest)
+ }
+
+ public interface ListGroupInfosResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.GroupInfo group_info = 1;
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo>
+ getGroupInfoList();
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo(int index);
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ int getGroupInfoCount();
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoOrBuilderList();
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListGroupInfosResponse}
+ */
+ public static final class ListGroupInfosResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ListGroupInfosResponseOrBuilder {
+ // Use ListGroupInfosResponse.newBuilder() to construct.
+ private ListGroupInfosResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListGroupInfosResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListGroupInfosResponse defaultInstance;
+ public static ListGroupInfosResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListGroupInfosResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListGroupInfosResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ groupInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ groupInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ groupInfo_ = java.util.Collections.unmodifiableList(groupInfo_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ListGroupInfosResponse> PARSER =
+ new com.google.protobuf.AbstractParser<ListGroupInfosResponse>() {
+ public ListGroupInfosResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListGroupInfosResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListGroupInfosResponse> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated .hbase.pb.GroupInfo group_info = 1;
+ public static final int GROUP_INFO_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo> groupInfo_;
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo> getGroupInfoList() {
+ return groupInfo_;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoOrBuilderList() {
+ return groupInfo_;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public int getGroupInfoCount() {
+ return groupInfo_.size();
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo(int index) {
+ return groupInfo_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder(
+ int index) {
+ return groupInfo_.get(index);
+ }
+
+ private void initFields() {
+ groupInfo_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getGroupInfoCount(); i++) {
+ if (!getGroupInfo(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < groupInfo_.size(); i++) {
+ output.writeMessage(1, groupInfo_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < groupInfo_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, groupInfo_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse) obj;
+
+ boolean result = true;
+ result = result && getGroupInfoList()
+ .equals(other.getGroupInfoList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getGroupInfoCount() > 0) {
+ hash = (37 * hash) + GROUP_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupInfoList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListGroupInfosResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getGroupInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_ListGroupInfosResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (groupInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ groupInfo_ = java.util.Collections.unmodifiableList(groupInfo_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.groupInfo_ = groupInfo_;
+ } else {
+ result.groupInfo_ = groupInfoBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance()) return this;
+ if (groupInfoBuilder_ == null) {
+ if (!other.groupInfo_.isEmpty()) {
+ if (groupInfo_.isEmpty()) {
+ groupInfo_ = other.groupInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureGroupInfoIsMutable();
+ groupInfo_.addAll(other.groupInfo_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.groupInfo_.isEmpty()) {
+ if (groupInfoBuilder_.isEmpty()) {
+ groupInfoBuilder_.dispose();
+ groupInfoBuilder_ = null;
+ groupInfo_ = other.groupInfo_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ groupInfoBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getGroupInfoFieldBuilder() : null;
+ } else {
+ groupInfoBuilder_.addAllMessages(other.groupInfo_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getGroupInfoCount(); i++) {
+ if (!getGroupInfo(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .hbase.pb.GroupInfo group_info = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo> groupInfo_ =
+ java.util.Collections.emptyList();
+ private void ensureGroupInfoIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ groupInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo>(groupInfo_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder> groupInfoBuilder_;
+
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo> getGroupInfoList() {
+ if (groupInfoBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(groupInfo_);
+ } else {
+ return groupInfoBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public int getGroupInfoCount() {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_.size();
+ } else {
+ return groupInfoBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo(int index) {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_.get(index);
+ } else {
+ return groupInfoBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Mutators below follow the standard generated copy-on-write pattern:
+ * while no nested builder exists they operate on the local list (after
+ * ensureGroupInfoIsMutable()), otherwise they delegate to
+ * groupInfoBuilder_. All return this for chaining.
+ */
+ public Builder setGroupInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureGroupInfoIsMutable();
+ groupInfo_.set(index, value);
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder setGroupInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ ensureGroupInfoIsMutable();
+ groupInfo_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder addGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureGroupInfoIsMutable();
+ groupInfo_.add(value);
+ onChanged();
+ } else {
+ groupInfoBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder addGroupInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureGroupInfoIsMutable();
+ groupInfo_.add(index, value);
+ onChanged();
+ } else {
+ groupInfoBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder addGroupInfo(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ ensureGroupInfoIsMutable();
+ groupInfo_.add(builderForValue.build());
+ onChanged();
+ } else {
+ groupInfoBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder addGroupInfo(
+ int index, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ ensureGroupInfoIsMutable();
+ groupInfo_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ groupInfoBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Bulk append; GeneratedMessage.Builder.addAll copies the iterable into
+ * the (mutable) local list.
+ */
+ public Builder addAllGroupInfo(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo> values) {
+ if (groupInfoBuilder_ == null) {
+ ensureGroupInfoIsMutable();
+ super.addAll(values, groupInfo_);
+ onChanged();
+ } else {
+ groupInfoBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Resets the field to the shared empty list and clears the
+ * copy-on-write bit so a later mutation re-copies.
+ */
+ public Builder clearGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder removeGroupInfo(int index) {
+ if (groupInfoBuilder_ == null) {
+ ensureGroupInfoIsMutable();
+ groupInfo_.remove(index);
+ onChanged();
+ } else {
+ groupInfoBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Forces creation of the nested repeated-field builder and returns the
+ * element builder at {@code index}.
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder getGroupInfoBuilder(
+ int index) {
+ return getGroupInfoFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder(
+ int index) {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_.get(index); } else {
+ return groupInfoBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Read-only view over messages or their in-progress builders.
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoOrBuilderList() {
+ if (groupInfoBuilder_ != null) {
+ return groupInfoBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(groupInfo_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ *
+ * Appends a new element builder initialized from the GroupInfo default
+ * instance.
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder addGroupInfoBuilder() {
+ return getGroupInfoFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder addGroupInfoBuilder(
+ int index) {
+ return getGroupInfoFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.GroupInfo group_info = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder>
+ getGroupInfoBuilderList() {
+ return getGroupInfoFieldBuilder().getBuilderList();
+ }
+ // Lazily creates the RepeatedFieldBuilder; from then on the builder owns
+ // the elements and the plain groupInfo_ list is released (set to null).
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoFieldBuilder() {
+ if (groupInfoBuilder_ == null) {
+ groupInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>(
+ groupInfo_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ groupInfo_ = null;
+ }
+ return groupInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListGroupInfosResponse)
+ }
+
+ // Eagerly build the shared default instance used by getDefaultInstance().
+ static {
+ defaultInstance = new ListGroupInfosResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListGroupInfosResponse)
+ }
+
+ // Read-only accessor contract shared by GetGroupInfoOfServerRequest and
+ // its Builder (generated from the .proto definition).
+ public interface GetGroupInfoOfServerRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .hbase.pb.HostPort server = 1;
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ boolean hasServer();
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServer();
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServerOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfServerRequest}
+ *
+ * Generated message class (protoc); edit the .proto definition and
+ * regenerate rather than changing this code by hand.
+ */
+ public static final class GetGroupInfoOfServerRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoOfServerRequestOrBuilder {
+ // Use GetGroupInfoOfServerRequest.newBuilder() to construct.
+ private GetGroupInfoOfServerRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetGroupInfoOfServerRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetGroupInfoOfServerRequest defaultInstance;
+ public static GetGroupInfoOfServerRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetGroupInfoOfServerRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ // Wire-format parsing constructor, invoked via PARSER below.
+ private GetGroupInfoOfServerRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = server_.toBuilder();
+ }
+ server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(server_);
+ server_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<GetGroupInfoOfServerRequest> PARSER =
+ new com.google.protobuf.AbstractParser<GetGroupInfoOfServerRequest>() {
+ public GetGroupInfoOfServerRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoOfServerRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetGroupInfoOfServerRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .hbase.pb.HostPort server = 1;
+ public static final int SERVER_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort server_;
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public boolean hasServer() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServer() {
+ return server_;
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServerOrBuilder() {
+ return server_;
+ }
+
+ private void initFields() {
+ server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasServer()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getServer().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, server_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, server_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest) obj;
+
+ boolean result = true;
+ result = result && (hasServer() == other.hasServer());
+ if (hasServer()) {
+ result = result && getServer()
+ .equals(other.getServer());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasServer()) {
+ hash = (37 * hash) + SERVER_FIELD_NUMBER;
+ hash = (53 * hash) + getServer().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfServerRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getServerFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (serverBuilder_ == null) {
+ server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance();
+ } else {
+ serverBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (serverBuilder_ == null) {
+ result.server_ = server_;
+ } else {
+ result.server_ = serverBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.getDefaultInstance()) return this;
+ if (other.hasServer()) {
+ mergeServer(other.getServer());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasServer()) {
+
+ return false;
+ }
+ if (!getServer().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .hbase.pb.HostPort server = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder> serverBuilder_;
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public boolean hasServer() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServer() {
+ if (serverBuilder_ == null) {
+ return server_;
+ } else {
+ return serverBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serverBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ server_ = value;
+ onChanged();
+ } else {
+ serverBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public Builder setServer(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serverBuilder_ == null) {
+ server_ = builderForValue.build();
+ onChanged();
+ } else {
+ serverBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serverBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance()) {
+ server_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.newBuilder(server_).mergeFrom(value).buildPartial();
+ } else {
+ server_ = value;
+ }
+ onChanged();
+ } else {
+ serverBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public Builder clearServer() {
+ if (serverBuilder_ == null) {
+ server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance();
+ onChanged();
+ } else {
+ serverBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder getServerBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getServerFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServerOrBuilder() {
+ if (serverBuilder_ != null) {
+ return serverBuilder_.getMessageOrBuilder();
+ } else {
+ return server_;
+ }
+ }
+ /**
+ * required .hbase.pb.HostPort server = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServerFieldBuilder() {
+ if (serverBuilder_ == null) {
+ serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>(
+ server_,
+ getParentForChildren(),
+ isClean());
+ server_ = null;
+ }
+ return serverBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoOfServerRequest)
+ }
+
+ static {
+ defaultInstance = new GetGroupInfoOfServerRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoOfServerRequest)
+ }
+
+ // Read-only accessor contract shared by GetGroupInfoOfServerResponse and
+ // its Builder (generated from the .proto definition).
+ public interface GetGroupInfoOfServerResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ boolean hasGroupInfo();
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo();
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfServerResponse}
+ */
+ public static final class GetGroupInfoOfServerResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetGroupInfoOfServerResponseOrBuilder {
+ // Use GetGroupInfoOfServerResponse.newBuilder() to construct.
+ private GetGroupInfoOfServerResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ // Parser-only constructor; unknown fields default to the shared empty set.
+ private GetGroupInfoOfServerResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ // Shared immutable default instance for this message type.
+ private static final GetGroupInfoOfServerResponse defaultInstance;
+ public static GetGroupInfoOfServerResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetGroupInfoOfServerResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ // Fields seen on the wire but not declared in the .proto schema.
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ // Wire-format parsing constructor, invoked via PARSER. Note: placing the
+ // switch "default" arm before "case 10" is valid Java and is standard
+ // protoc-2.5 output; unmatched tags fall into parseUnknownField.
+ private GetGroupInfoOfServerResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = groupInfo_.toBuilder();
+ }
+ groupInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(groupInfo_);
+ groupInfo_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ // Descriptor/reflection plumbing generated for this message type.
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.Builder.class);
+ }
+
+ // Parser singleton delegating to the wire-format parsing constructor.
+ public static com.google.protobuf.Parser<GetGroupInfoOfServerResponse> PARSER =
+ new com.google.protobuf.AbstractParser<GetGroupInfoOfServerResponse>() {
+ public GetGroupInfoOfServerResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetGroupInfoOfServerResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetGroupInfoOfServerResponse> getParserForType() {
+ return PARSER;
+ }
+
+ // Presence bits: bit 0 tracks whether group_info was set.
+ private int bitField0_;
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ public static final int GROUP_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ return groupInfo_;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ return groupInfo_;
+ }
+
+ private void initFields() {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ }
+ // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ // group_info is optional, so only validate it when present.
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ // Serializes set fields then any unknown fields; getSerializedSize() is
+ // called first to populate the memoized size used by writeMessage.
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, groupInfo_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, groupInfo_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse) obj;
+
+ boolean result = true;
+ result = result && (hasGroupInfo() == other.hasGroupInfo());
+ if (hasGroupInfo()) {
+ result = result && getGroupInfo()
+ .equals(other.getGroupInfo());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasGroupInfo()) {
+ hash = (37 * hash) + GROUP_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getGroupInfo().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GetGroupInfoOfServerResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getGroupInfoFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (groupInfoBuilder_ == null) {
+ result.groupInfo_ = groupInfo_;
+ } else {
+ result.groupInfo_ = groupInfoBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance()) return this;
+ if (other.hasGroupInfo()) {
+ mergeGroupInfo(other.getGroupInfo());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (hasGroupInfo()) {
+ if (!getGroupInfo().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .hbase.pb.GroupInfo group_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder> groupInfoBuilder_;
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public boolean hasGroupInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ return groupInfo_;
+ } else {
+ return groupInfoBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder setGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ groupInfo_ = value;
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder setGroupInfo(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder builderForValue) {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ groupInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder mergeGroupInfo(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo value) {
+ if (groupInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ groupInfo_ != org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance()) {
+ groupInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.newBuilder(groupInfo_).mergeFrom(value).buildPartial();
+ } else {
+ groupInfo_ = value;
+ }
+ onChanged();
+ } else {
+ groupInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public Builder clearGroupInfo() {
+ if (groupInfoBuilder_ == null) {
+ groupInfo_ = org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ onChanged();
+ } else {
+ groupInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder getGroupInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getGroupInfoFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder getGroupInfoOrBuilder() {
+ if (groupInfoBuilder_ != null) {
+ return groupInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return groupInfo_;
+ }
+ }
+ /**
+ * optional .hbase.pb.GroupInfo group_info = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>
+ getGroupInfoFieldBuilder() {
+ if (groupInfoBuilder_ == null) {
+ groupInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder>(
+ groupInfo_,
+ getParentForChildren(),
+ isClean());
+ groupInfo_ = null;
+ }
+ return groupInfoBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GetGroupInfoOfServerResponse)
+ }
+
+ static {
+ defaultInstance = new GetGroupInfoOfServerResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GetGroupInfoOfServerResponse)
+ }
+
+ /**
+ * Protobuf service {@code hbase.pb.GroupAdminService}
+ */
+ public static abstract class GroupAdminService
+ implements com.google.protobuf.Service {
+ protected GroupAdminService() {}
+
+ public interface Interface {
+ /**
+ * rpc GetGroupInfo(.hbase.pb.GetGroupInfoRequest) returns (.hbase.pb.GetGroupInfoResponse);
+ */
+ public abstract void getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc GetGroupInfoOfTable(.hbase.pb.GetGroupInfoOfTableRequest) returns (.hbase.pb.GetGroupInfoOfTableResponse);
+ */
+ public abstract void getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc GetGroupInfoOfServer(.hbase.pb.GetGroupInfoOfServerRequest) returns (.hbase.pb.GetGroupInfoOfServerResponse);
+ */
+ public abstract void getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc MoveServers(.hbase.pb.MoveServersRequest) returns (.hbase.pb.MoveServersResponse);
+ */
+ public abstract void moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc MoveTables(.hbase.pb.MoveTablesRequest) returns (.hbase.pb.MoveTablesResponse);
+ */
+ public abstract void moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc AddGroup(.hbase.pb.AddGroupRequest) returns (.hbase.pb.AddGroupResponse);
+ */
+ public abstract void addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc RemoveGroup(.hbase.pb.RemoveGroupRequest) returns (.hbase.pb.RemoveGroupResponse);
+ */
+ public abstract void removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc BalanceGroup(.hbase.pb.BalanceGroupRequest) returns (.hbase.pb.BalanceGroupResponse);
+ */
+ public abstract void balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc ListGroupInfos(.hbase.pb.ListGroupInfosRequest) returns (.hbase.pb.ListGroupInfosResponse);
+ */
+ public abstract void listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new GroupAdminService() {
+ @java.lang.Override
+ public void getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getGroupInfo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getGroupInfoOfTable(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getGroupInfoOfServer(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.moveServers(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.moveTables(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.addGroup(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.removeGroup(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.balanceGroup(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request,
+ com.google.protobuf.RpcCallback done) {
+ impl.listGroupInfos(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.getGroupInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest)request);
+ case 1:
+ return impl.getGroupInfoOfTable(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest)request);
+ case 2:
+ return impl.getGroupInfoOfServer(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest)request);
+ case 3:
+ return impl.moveServers(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest)request);
+ case 4:
+ return impl.moveTables(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest)request);
+ case 5:
+ return impl.addGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest)request);
+ case 6:
+ return impl.removeGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest)request);
+ case 7:
+ return impl.balanceGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest)request);
+ case 8:
+ return impl.listGroupInfos(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc GetGroupInfo(.hbase.pb.GetGroupInfoRequest) returns (.hbase.pb.GetGroupInfoResponse);
+ */
+ public abstract void getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc GetGroupInfoOfTable(.hbase.pb.GetGroupInfoOfTableRequest) returns (.hbase.pb.GetGroupInfoOfTableResponse);
+ */
+ public abstract void getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc GetGroupInfoOfServer(.hbase.pb.GetGroupInfoOfServerRequest) returns (.hbase.pb.GetGroupInfoOfServerResponse);
+ */
+ public abstract void getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc MoveServers(.hbase.pb.MoveServersRequest) returns (.hbase.pb.MoveServersResponse);
+ */
+ public abstract void moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc MoveTables(.hbase.pb.MoveTablesRequest) returns (.hbase.pb.MoveTablesResponse);
+ */
+ public abstract void moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc AddGroup(.hbase.pb.AddGroupRequest) returns (.hbase.pb.AddGroupResponse);
+ */
+ public abstract void addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc RemoveGroup(.hbase.pb.RemoveGroupRequest) returns (.hbase.pb.RemoveGroupResponse);
+ */
+ public abstract void removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc BalanceGroup(.hbase.pb.BalanceGroupRequest) returns (.hbase.pb.BalanceGroupResponse);
+ */
+ public abstract void balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc ListGroupInfos(.hbase.pb.ListGroupInfosRequest) returns (.hbase.pb.ListGroupInfosResponse);
+ */
+ public abstract void listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request,
+ com.google.protobuf.RpcCallback done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.getGroupInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.getGroupInfoOfTable(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 2:
+ this.getGroupInfoOfServer(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 3:
+ this.moveServers(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 4:
+ this.moveTables(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 5:
+ this.addGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 6:
+ this.removeGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 7:
+ this.balanceGroup(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 8:
+ this.listGroupInfos(controller, (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GroupAdminService implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance()));
+ }
+
+ public void getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance()));
+ }
+
+ public void getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance()));
+ }
+
+ public void moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance()));
+ }
+
+ public void moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance()));
+ }
+
+ public void addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance()));
+ }
+
+ public void removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance()));
+ }
+
+ public void balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance()));
+ }
+
+ public void listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse getGroupInfo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse getGroupInfoOfTable(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfTableResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse getGroupInfoOfServer(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.GetGroupInfoOfServerResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse moveServers(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveServersResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse moveTables(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.MoveTablesResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse addGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.AddGroupResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse removeGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.RemoveGroupResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse balanceGroup(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.BalanceGroupResponse.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse listGroupInfos(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.GroupAdminProtos.ListGroupInfosResponse.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GroupAdminService)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListTablesOfGroupRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListTablesOfGroupResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoOfTableRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoOfTableResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_MoveServersRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_MoveServersRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_MoveServersResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_MoveServersResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_MoveTablesRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_MoveTablesRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_MoveTablesResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_MoveTablesResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_AddGroupRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_AddGroupRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_AddGroupResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_AddGroupResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_RemoveGroupRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_RemoveGroupRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_RemoveGroupResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_RemoveGroupResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_BalanceGroupRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_BalanceGroupRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_BalanceGroupResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_BalanceGroupResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListGroupInfosRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListGroupInfosRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListGroupInfosResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListGroupInfosResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoOfServerRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GetGroupInfoOfServerResponse_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\020GroupAdmin.proto\022\010hbase.pb\032\013HBase.prot" +
+ "o\032\013Group.proto\".\n\030ListTablesOfGroupReque" +
+ "st\022\022\n\ngroup_name\030\001 \002(\t\"D\n\031ListTablesOfGr" +
+ "oupResponse\022\'\n\ntable_name\030\001 \003(\0132\023.hbase." +
+ "pb.TableName\")\n\023GetGroupInfoRequest\022\022\n\ng" +
+ "roup_name\030\001 \002(\t\"?\n\024GetGroupInfoResponse\022" +
+ "\'\n\ngroup_info\030\001 \001(\0132\023.hbase.pb.GroupInfo" +
+ "\"E\n\032GetGroupInfoOfTableRequest\022\'\n\ntable_" +
+ "name\030\001 \002(\0132\023.hbase.pb.TableName\"F\n\033GetGr" +
+ "oupInfoOfTableResponse\022\'\n\ngroup_info\030\001 \001",
+ "(\0132\023.hbase.pb.GroupInfo\"O\n\022MoveServersRe" +
+ "quest\022\024\n\014target_group\030\001 \002(\t\022#\n\007servers\030\002" +
+ " \003(\0132\022.hbase.pb.HostPort\"\025\n\023MoveServersR" +
+ "esponse\"R\n\021MoveTablesRequest\022\024\n\014target_g" +
+ "roup\030\001 \002(\t\022\'\n\ntable_name\030\002 \003(\0132\023.hbase.p" +
+ "b.TableName\"\024\n\022MoveTablesResponse\"%\n\017Add" +
+ "GroupRequest\022\022\n\ngroup_name\030\001 \002(\t\"\022\n\020AddG" +
+ "roupResponse\"(\n\022RemoveGroupRequest\022\022\n\ngr" +
+ "oup_name\030\001 \002(\t\"\025\n\023RemoveGroupResponse\")\n" +
+ "\023BalanceGroupRequest\022\022\n\ngroup_name\030\001 \002(\t",
+ "\"*\n\024BalanceGroupResponse\022\022\n\nbalanceRan\030\001" +
+ " \002(\010\"\027\n\025ListGroupInfosRequest\"A\n\026ListGro" +
+ "upInfosResponse\022\'\n\ngroup_info\030\001 \003(\0132\023.hb" +
+ "ase.pb.GroupInfo\"A\n\033GetGroupInfoOfServer" +
+ "Request\022\"\n\006server\030\001 \002(\0132\022.hbase.pb.HostP" +
+ "ort\"G\n\034GetGroupInfoOfServerResponse\022\'\n\ng" +
+ "roup_info\030\001 \001(\0132\023.hbase.pb.GroupInfo2\365\005\n" +
+ "\021GroupAdminService\022M\n\014GetGroupInfo\022\035.hba" +
+ "se.pb.GetGroupInfoRequest\032\036.hbase.pb.Get" +
+ "GroupInfoResponse\022b\n\023GetGroupInfoOfTable",
+ "\022$.hbase.pb.GetGroupInfoOfTableRequest\032%" +
+ ".hbase.pb.GetGroupInfoOfTableResponse\022e\n" +
+ "\024GetGroupInfoOfServer\022%.hbase.pb.GetGrou" +
+ "pInfoOfServerRequest\032&.hbase.pb.GetGroup" +
+ "InfoOfServerResponse\022J\n\013MoveServers\022\034.hb" +
+ "ase.pb.MoveServersRequest\032\035.hbase.pb.Mov" +
+ "eServersResponse\022G\n\nMoveTables\022\033.hbase.p" +
+ "b.MoveTablesRequest\032\034.hbase.pb.MoveTable" +
+ "sResponse\022A\n\010AddGroup\022\031.hbase.pb.AddGrou" +
+ "pRequest\032\032.hbase.pb.AddGroupResponse\022J\n\013",
+ "RemoveGroup\022\034.hbase.pb.RemoveGroupReques" +
+ "t\032\035.hbase.pb.RemoveGroupResponse\022M\n\014Bala" +
+ "nceGroup\022\035.hbase.pb.BalanceGroupRequest\032" +
+ "\036.hbase.pb.BalanceGroupResponse\022S\n\016ListG" +
+ "roupInfos\022\037.hbase.pb.ListGroupInfosReque" +
+ "st\032 .hbase.pb.ListGroupInfosResponseBF\n*" +
+ "org.apache.hadoop.hbase.protobuf.generat" +
+ "edB\020GroupAdminProtosH\001\210\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_ListTablesOfGroupRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ListTablesOfGroupRequest_descriptor,
+ new java.lang.String[] { "GroupName", });
+ internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_hbase_pb_ListTablesOfGroupResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ListTablesOfGroupResponse_descriptor,
+ new java.lang.String[] { "TableName", });
+ internal_static_hbase_pb_GetGroupInfoRequest_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_hbase_pb_GetGroupInfoRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoRequest_descriptor,
+ new java.lang.String[] { "GroupName", });
+ internal_static_hbase_pb_GetGroupInfoResponse_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_hbase_pb_GetGroupInfoResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoResponse_descriptor,
+ new java.lang.String[] { "GroupInfo", });
+ internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor =
+ getDescriptor().getMessageTypes().get(4);
+ internal_static_hbase_pb_GetGroupInfoOfTableRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoOfTableRequest_descriptor,
+ new java.lang.String[] { "TableName", });
+ internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor =
+ getDescriptor().getMessageTypes().get(5);
+ internal_static_hbase_pb_GetGroupInfoOfTableResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoOfTableResponse_descriptor,
+ new java.lang.String[] { "GroupInfo", });
+ internal_static_hbase_pb_MoveServersRequest_descriptor =
+ getDescriptor().getMessageTypes().get(6);
+ internal_static_hbase_pb_MoveServersRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_MoveServersRequest_descriptor,
+ new java.lang.String[] { "TargetGroup", "Servers", });
+ internal_static_hbase_pb_MoveServersResponse_descriptor =
+ getDescriptor().getMessageTypes().get(7);
+ internal_static_hbase_pb_MoveServersResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_MoveServersResponse_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_MoveTablesRequest_descriptor =
+ getDescriptor().getMessageTypes().get(8);
+ internal_static_hbase_pb_MoveTablesRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_MoveTablesRequest_descriptor,
+ new java.lang.String[] { "TargetGroup", "TableName", });
+ internal_static_hbase_pb_MoveTablesResponse_descriptor =
+ getDescriptor().getMessageTypes().get(9);
+ internal_static_hbase_pb_MoveTablesResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_MoveTablesResponse_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_AddGroupRequest_descriptor =
+ getDescriptor().getMessageTypes().get(10);
+ internal_static_hbase_pb_AddGroupRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_AddGroupRequest_descriptor,
+ new java.lang.String[] { "GroupName", });
+ internal_static_hbase_pb_AddGroupResponse_descriptor =
+ getDescriptor().getMessageTypes().get(11);
+ internal_static_hbase_pb_AddGroupResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_AddGroupResponse_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_RemoveGroupRequest_descriptor =
+ getDescriptor().getMessageTypes().get(12);
+ internal_static_hbase_pb_RemoveGroupRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_RemoveGroupRequest_descriptor,
+ new java.lang.String[] { "GroupName", });
+ internal_static_hbase_pb_RemoveGroupResponse_descriptor =
+ getDescriptor().getMessageTypes().get(13);
+ internal_static_hbase_pb_RemoveGroupResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_RemoveGroupResponse_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_BalanceGroupRequest_descriptor =
+ getDescriptor().getMessageTypes().get(14);
+ internal_static_hbase_pb_BalanceGroupRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_BalanceGroupRequest_descriptor,
+ new java.lang.String[] { "GroupName", });
+ internal_static_hbase_pb_BalanceGroupResponse_descriptor =
+ getDescriptor().getMessageTypes().get(15);
+ internal_static_hbase_pb_BalanceGroupResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_BalanceGroupResponse_descriptor,
+ new java.lang.String[] { "BalanceRan", });
+ internal_static_hbase_pb_ListGroupInfosRequest_descriptor =
+ getDescriptor().getMessageTypes().get(16);
+ internal_static_hbase_pb_ListGroupInfosRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ListGroupInfosRequest_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_ListGroupInfosResponse_descriptor =
+ getDescriptor().getMessageTypes().get(17);
+ internal_static_hbase_pb_ListGroupInfosResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ListGroupInfosResponse_descriptor,
+ new java.lang.String[] { "GroupInfo", });
+ internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor =
+ getDescriptor().getMessageTypes().get(18);
+ internal_static_hbase_pb_GetGroupInfoOfServerRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoOfServerRequest_descriptor,
+ new java.lang.String[] { "Server", });
+ internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor =
+ getDescriptor().getMessageTypes().get(19);
+ internal_static_hbase_pb_GetGroupInfoOfServerResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GetGroupInfoOfServerResponse_descriptor,
+ new java.lang.String[] { "GroupInfo", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupProtos.java
new file mode 100644
index 0000000..a786424
--- /dev/null
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/GroupProtos.java
@@ -0,0 +1,1331 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Group.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class GroupProtos {
+ private GroupProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface GroupInfoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string name = 1;
+ /**
+ * required string name = 1;
+ */
+ boolean hasName();
+ /**
+ * required string name = 1;
+ */
+ java.lang.String getName();
+ /**
+ * required string name = 1;
+ */
+ com.google.protobuf.ByteString
+ getNameBytes();
+
+ // repeated .hbase.pb.HostPort servers = 4;
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ java.util.List
+ getServersList();
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index);
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ int getServersCount();
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList();
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index);
+
+ // repeated .hbase.pb.TableName tables = 3;
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ java.util.List
+ getTablesList();
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTables(int index);
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ int getTablesCount();
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTablesOrBuilderList();
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTablesOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GroupInfo}
+ */
+ public static final class GroupInfo extends
+ com.google.protobuf.GeneratedMessage
+ implements GroupInfoOrBuilder {
+ // Use GroupInfo.newBuilder() to construct.
+ private GroupInfo(com.google.protobuf.GeneratedMessage.Builder> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GroupInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GroupInfo defaultInstance;
+ public static GroupInfo getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GroupInfo getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private GroupInfo(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ tables_ = new java.util.ArrayList();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ tables_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
+ break;
+ }
+ case 34: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = new java.util.ArrayList();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ servers_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ tables_ = java.util.Collections.unmodifiableList(tables_);
+ }
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = java.util.Collections.unmodifiableList(servers_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.internal_static_hbase_pb_GroupInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.internal_static_hbase_pb_GroupInfo_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.class, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public GroupInfo parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GroupInfo(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string name = 1;
+ public static final int NAME_FIELD_NUMBER = 1;
+ private java.lang.Object name_;
+ /**
+ * required string name = 1;
+ */
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string name = 1;
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ name_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // repeated .hbase.pb.HostPort servers = 4;
+ public static final int SERVERS_FIELD_NUMBER = 4;
+ private java.util.List servers_;
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public java.util.List getServersList() {
+ return servers_;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList() {
+ return servers_;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public int getServersCount() {
+ return servers_.size();
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index) {
+ return servers_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index) {
+ return servers_.get(index);
+ }
+
+ // repeated .hbase.pb.TableName tables = 3;
+ public static final int TABLES_FIELD_NUMBER = 3;
+ private java.util.List tables_;
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public java.util.List getTablesList() {
+ return tables_;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTablesOrBuilderList() {
+ return tables_;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public int getTablesCount() {
+ return tables_.size();
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTables(int index) {
+ return tables_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTablesOrBuilder(
+ int index) {
+ return tables_.get(index);
+ }
+
+ private void initFields() {
+ name_ = "";
+ servers_ = java.util.Collections.emptyList();
+ tables_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ for (int i = 0; i < getServersCount(); i++) {
+ if (!getServers(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ for (int i = 0; i < getTablesCount(); i++) {
+ if (!getTables(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getNameBytes());
+ }
+ for (int i = 0; i < tables_.size(); i++) {
+ output.writeMessage(3, tables_.get(i));
+ }
+ for (int i = 0; i < servers_.size(); i++) {
+ output.writeMessage(4, servers_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getNameBytes());
+ }
+ for (int i = 0; i < tables_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, tables_.get(i));
+ }
+ for (int i = 0; i < servers_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, servers_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo other = (org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo) obj;
+
+ boolean result = true;
+ result = result && (hasName() == other.hasName());
+ if (hasName()) {
+ result = result && getName()
+ .equals(other.getName());
+ }
+ result = result && getServersList()
+ .equals(other.getServersList());
+ result = result && getTablesList()
+ .equals(other.getTablesList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasName()) {
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ }
+ if (getServersCount() > 0) {
+ hash = (37 * hash) + SERVERS_FIELD_NUMBER;
+ hash = (53 * hash) + getServersList().hashCode();
+ }
+ if (getTablesCount() > 0) {
+ hash = (37 * hash) + TABLES_FIELD_NUMBER;
+ hash = (53 * hash) + getTablesList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.GroupInfo}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.internal_static_hbase_pb_GroupInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.internal_static_hbase_pb_GroupInfo_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.class, org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getServersFieldBuilder();
+ getTablesFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (serversBuilder_ == null) {
+ servers_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ serversBuilder_.clear();
+ }
+ if (tablesBuilder_ == null) {
+ tables_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ tablesBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.internal_static_hbase_pb_GroupInfo_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo build() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo result = new org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.name_ = name_;
+ if (serversBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = java.util.Collections.unmodifiableList(servers_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.servers_ = servers_;
+ } else {
+ result.servers_ = serversBuilder_.build();
+ }
+ if (tablesBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ tables_ = java.util.Collections.unmodifiableList(tables_);
+ bitField0_ = (bitField0_ & ~0x00000004);
+ }
+ result.tables_ = tables_;
+ } else {
+ result.tables_ = tablesBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo.getDefaultInstance()) return this;
+ if (other.hasName()) {
+ bitField0_ |= 0x00000001;
+ name_ = other.name_;
+ onChanged();
+ }
+ if (serversBuilder_ == null) {
+ if (!other.servers_.isEmpty()) {
+ if (servers_.isEmpty()) {
+ servers_ = other.servers_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureServersIsMutable();
+ servers_.addAll(other.servers_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.servers_.isEmpty()) {
+ if (serversBuilder_.isEmpty()) {
+ serversBuilder_.dispose();
+ serversBuilder_ = null;
+ servers_ = other.servers_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ serversBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getServersFieldBuilder() : null;
+ } else {
+ serversBuilder_.addAllMessages(other.servers_);
+ }
+ }
+ }
+ if (tablesBuilder_ == null) {
+ if (!other.tables_.isEmpty()) {
+ if (tables_.isEmpty()) {
+ tables_ = other.tables_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ ensureTablesIsMutable();
+ tables_.addAll(other.tables_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.tables_.isEmpty()) {
+ if (tablesBuilder_.isEmpty()) {
+ tablesBuilder_.dispose();
+ tablesBuilder_ = null;
+ tables_ = other.tables_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ tablesBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getTablesFieldBuilder() : null;
+ } else {
+ tablesBuilder_.addAllMessages(other.tables_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasName()) {
+
+ return false;
+ }
+ for (int i = 0; i < getServersCount(); i++) {
+ if (!getServers(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ for (int i = 0; i < getTablesCount(); i++) {
+ if (!getTables(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.GroupProtos.GroupInfo) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string name = 1;
+ private java.lang.Object name_ = "";
+ /**
+ * required string name = 1;
+ */
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string name = 1;
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string name = 1;
+ */
+ public Builder setName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string name = 1;
+ */
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string name = 1;
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+
+ // repeated .hbase.pb.HostPort servers = 4;
+ private java.util.List servers_ =
+ java.util.Collections.emptyList();
+ private void ensureServersIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ servers_ = new java.util.ArrayList(servers_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder> serversBuilder_;
+
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public java.util.List getServersList() {
+ if (serversBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(servers_);
+ } else {
+ return serversBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public int getServersCount() {
+ if (serversBuilder_ == null) {
+ return servers_.size();
+ } else {
+ return serversBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getServers(int index) {
+ if (serversBuilder_ == null) {
+ return servers_.get(index);
+ } else {
+ return serversBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder setServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.set(index, value);
+ onChanged();
+ } else {
+ serversBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder setServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder addServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.add(value);
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder addServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort value) {
+ if (serversBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureServersIsMutable();
+ servers_.add(index, value);
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder addServers(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.add(builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder addServers(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder builderForValue) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ serversBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder addAllServers(
+ java.lang.Iterable extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort> values) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ super.addAll(values, servers_);
+ onChanged();
+ } else {
+ serversBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder clearServers() {
+ if (serversBuilder_ == null) {
+ servers_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ } else {
+ serversBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public Builder removeServers(int index) {
+ if (serversBuilder_ == null) {
+ ensureServersIsMutable();
+ servers_.remove(index);
+ onChanged();
+ } else {
+ serversBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder getServersBuilder(
+ int index) {
+ return getServersFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder getServersOrBuilder(
+ int index) {
+ if (serversBuilder_ == null) {
+ return servers_.get(index); } else {
+ return serversBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public java.util.List extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersOrBuilderList() {
+ if (serversBuilder_ != null) {
+ return serversBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(servers_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder addServersBuilder() {
+ return getServersFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder addServersBuilder(
+ int index) {
+ return getServersFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.HostPort servers = 4;
+ */
+ public java.util.List
+ getServersBuilderList() {
+ return getServersFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>
+ getServersFieldBuilder() {
+ if (serversBuilder_ == null) {
+ serversBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder>(
+ servers_,
+ ((bitField0_ & 0x00000002) == 0x00000002),
+ getParentForChildren(),
+ isClean());
+ servers_ = null;
+ }
+ return serversBuilder_;
+ }
+
+ // repeated .hbase.pb.TableName tables = 3;
+ private java.util.List tables_ =
+ java.util.Collections.emptyList();
+ private void ensureTablesIsMutable() {
+ if (!((bitField0_ & 0x00000004) == 0x00000004)) {
+ tables_ = new java.util.ArrayList(tables_);
+ bitField0_ |= 0x00000004;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tablesBuilder_;
+
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public java.util.List getTablesList() {
+ if (tablesBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(tables_);
+ } else {
+ return tablesBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public int getTablesCount() {
+ if (tablesBuilder_ == null) {
+ return tables_.size();
+ } else {
+ return tablesBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTables(int index) {
+ if (tablesBuilder_ == null) {
+ return tables_.get(index);
+ } else {
+ return tablesBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder setTables(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tablesBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTablesIsMutable();
+ tables_.set(index, value);
+ onChanged();
+ } else {
+ tablesBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder setTables(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tablesBuilder_ == null) {
+ ensureTablesIsMutable();
+ tables_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ tablesBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder addTables(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tablesBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTablesIsMutable();
+ tables_.add(value);
+ onChanged();
+ } else {
+ tablesBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder addTables(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tablesBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureTablesIsMutable();
+ tables_.add(index, value);
+ onChanged();
+ } else {
+ tablesBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder addTables(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tablesBuilder_ == null) {
+ ensureTablesIsMutable();
+ tables_.add(builderForValue.build());
+ onChanged();
+ } else {
+ tablesBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder addTables(
+ int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tablesBuilder_ == null) {
+ ensureTablesIsMutable();
+ tables_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ tablesBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder addAllTables(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
+ if (tablesBuilder_ == null) {
+ ensureTablesIsMutable();
+ super.addAll(values, tables_);
+ onChanged();
+ } else {
+ tablesBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder clearTables() {
+ if (tablesBuilder_ == null) {
+ tables_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ onChanged();
+ } else {
+ tablesBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public Builder removeTables(int index) {
+ if (tablesBuilder_ == null) {
+ ensureTablesIsMutable();
+ tables_.remove(index);
+ onChanged();
+ } else {
+ tablesBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTablesBuilder(
+ int index) {
+ return getTablesFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTablesOrBuilder(
+ int index) {
+ if (tablesBuilder_ == null) {
+ return tables_.get(index); } else {
+ return tablesBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTablesOrBuilderList() {
+ if (tablesBuilder_ != null) {
+ return tablesBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(tables_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTablesBuilder() {
+ return getTablesFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTablesBuilder(
+ int index) {
+ return getTablesFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.TableName tables = 3;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder>
+ getTablesBuilderList() {
+ return getTablesFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTablesFieldBuilder() {
+ if (tablesBuilder_ == null) {
+ tablesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tables_,
+ ((bitField0_ & 0x00000004) == 0x00000004),
+ getParentForChildren(),
+ isClean());
+ tables_ = null;
+ }
+ return tablesBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.GroupInfo)
+ }
+
+ static {
+ defaultInstance = new GroupInfo(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.GroupInfo)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_GroupInfo_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_GroupInfo_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\013Group.proto\022\010hbase.pb\032\013HBase.proto\"c\n\t" +
+ "GroupInfo\022\014\n\004name\030\001 \002(\t\022#\n\007servers\030\004 \003(\013" +
+ "2\022.hbase.pb.HostPort\022#\n\006tables\030\003 \003(\0132\023.h" +
+ "base.pb.TableNameBA\n*org.apache.hadoop.h" +
+ "base.protobuf.generatedB\013GroupProtosH\001\210\001" +
+ "\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_GroupInfo_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_GroupInfo_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_GroupInfo_descriptor,
+ new java.lang.String[] { "Name", "Servers", "Tables", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
index 603aef2..a7f4f52 100644
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
@@ -16488,6 +16488,621 @@ public final class HBaseProtos {
// @@protoc_insertion_point(class_scope:RegionServerInfo)
}
+ public interface HostPortOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string host_name = 1;
+ /**
+ * required string host_name = 1;
+ */
+ boolean hasHostName();
+ /**
+ * required string host_name = 1;
+ */
+ java.lang.String getHostName();
+ /**
+ * required string host_name = 1;
+ */
+ com.google.protobuf.ByteString
+ getHostNameBytes();
+
+ // required uint32 port = 2;
+ /**
+ * required uint32 port = 2;
+ */
+ boolean hasPort();
+ /**
+ * required uint32 port = 2;
+ */
+ int getPort();
+ }
+ /**
+ * Protobuf type {@code HostPort}
+ */
+ public static final class HostPort extends
+ com.google.protobuf.GeneratedMessage
+ implements HostPortOrBuilder {
+ // Use HostPort.newBuilder() to construct.
+ private HostPort(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private HostPort(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final HostPort defaultInstance;
+ public static HostPort getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HostPort getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private HostPort(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ hostName_ = input.readBytes();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ port_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_HostPort_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_HostPort_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<HostPort> PARSER =
+ new com.google.protobuf.AbstractParser<HostPort>() {
+ public HostPort parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new HostPort(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<HostPort> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string host_name = 1;
+ public static final int HOST_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object hostName_;
+ /**
+ * required string host_name = 1;
+ */
+ public boolean hasHostName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public java.lang.String getHostName() {
+ java.lang.Object ref = hostName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ hostName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getHostNameBytes() {
+ java.lang.Object ref = hostName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ hostName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required uint32 port = 2;
+ public static final int PORT_FIELD_NUMBER = 2;
+ private int port_;
+ /**
+ * required uint32 port = 2;
+ */
+ public boolean hasPort() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required uint32 port = 2;
+ */
+ public int getPort() {
+ return port_;
+ }
+
+ private void initFields() {
+ hostName_ = "";
+ port_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasHostName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasPort()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getHostNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt32(2, port_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getHostNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(2, port_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort) obj;
+
+ boolean result = true;
+ result = result && (hasHostName() == other.hasHostName());
+ if (hasHostName()) {
+ result = result && getHostName()
+ .equals(other.getHostName());
+ }
+ result = result && (hasPort() == other.hasPort());
+ if (hasPort()) {
+ result = result && (getPort()
+ == other.getPort());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasHostName()) {
+ hash = (37 * hash) + HOST_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getHostName().hashCode();
+ }
+ if (hasPort()) {
+ hash = (37 * hash) + PORT_FIELD_NUMBER;
+ hash = (53 * hash) + getPort();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code HostPort}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPortOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_HostPort_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_HostPort_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ hostName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ port_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_HostPort_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort build() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.hostName_ = hostName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.port_ = port_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort.getDefaultInstance()) return this;
+ if (other.hasHostName()) {
+ bitField0_ |= 0x00000001;
+ hostName_ = other.hostName_;
+ onChanged();
+ }
+ if (other.hasPort()) {
+ setPort(other.getPort());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasHostName()) {
+
+ return false;
+ }
+ if (!hasPort()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.HostPort) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string host_name = 1;
+ private java.lang.Object hostName_ = "";
+ /**
+ * required string host_name = 1;
+ */
+ public boolean hasHostName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public java.lang.String getHostName() {
+ java.lang.Object ref = hostName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ hostName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public com.google.protobuf.ByteString
+ getHostNameBytes() {
+ java.lang.Object ref = hostName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ hostName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public Builder setHostName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ hostName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public Builder clearHostName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ hostName_ = getDefaultInstance().getHostName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string host_name = 1;
+ */
+ public Builder setHostNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ hostName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // required uint32 port = 2;
+ private int port_ ;
+ /**
+ * required uint32 port = 2;
+ */
+ public boolean hasPort() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required uint32 port = 2;
+ */
+ public int getPort() {
+ return port_;
+ }
+ /**
+ * required uint32 port = 2;
+ */
+ public Builder setPort(int value) {
+ bitField0_ |= 0x00000002;
+ port_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required uint32 port = 2;
+ */
+ public Builder clearPort() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ port_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:HostPort)
+ }
+
+ static {
+ defaultInstance = new HostPort(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HostPort)
+ }
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_TableName_descriptor;
private static
@@ -16598,6 +17213,11 @@ public final class HBaseProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_RegionServerInfo_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_HostPort_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_HostPort_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -16648,14 +17268,15 @@ public final class HBaseProtos {
" \002(\004\022\025\n\rmost_sig_bits\030\002 \002(\004\"K\n\023Namespace" +
"Descriptor\022\014\n\004name\030\001 \002(\014\022&\n\rconfiguratio",
"n\030\002 \003(\0132\017.NameStringPair\"$\n\020RegionServer" +
- "Info\022\020\n\010infoPort\030\001 \001(\005*r\n\013CompareType\022\010\n" +
- "\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n" +
- "\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GR" +
- "EATER\020\005\022\t\n\005NO_OP\020\006*n\n\010TimeUnit\022\017\n\013NANOSE" +
- "CONDS\020\001\022\020\n\014MICROSECONDS\020\002\022\020\n\014MILLISECOND" +
- "S\020\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOURS\020\006" +
- "\022\010\n\004DAYS\020\007B>\n*org.apache.hadoop.hbase.pr" +
- "otobuf.generatedB\013HBaseProtosH\001\240\001\001"
+ "Info\022\020\n\010infoPort\030\001 \001(\005\"+\n\010HostPort\022\021\n\tho" +
+ "st_name\030\001 \002(\t\022\014\n\004port\030\002 \002(\r*r\n\013CompareTy" +
+ "pe\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL" +
+ "\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022" +
+ "\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*n\n\010TimeUnit\022\017\n\013N" +
+ "ANOSECONDS\020\001\022\020\n\014MICROSECONDS\020\002\022\020\n\014MILLIS" +
+ "ECONDS\020\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HO" +
+ "URS\020\006\022\010\n\004DAYS\020\007B>\n*org.apache.hadoop.hba" +
+ "se.protobuf.generatedB\013HBaseProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -16794,6 +17415,12 @@ public final class HBaseProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RegionServerInfo_descriptor,
new java.lang.String[] { "InfoPort", });
+ internal_static_HostPort_descriptor =
+ getDescriptor().getMessageTypes().get(22);
+ internal_static_HostPort_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_HostPort_descriptor,
+ new java.lang.String[] { "HostName", "Port", });
return null;
}
};
diff --git hbase-protocol/src/main/protobuf/Group.proto hbase-protocol/src/main/protobuf/Group.proto
new file mode 100644
index 0000000..1cd1598
--- /dev/null
+++ hbase-protocol/src/main/protobuf/Group.proto
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "GroupProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "HBase.proto";
+
+message GroupInfo {
+ required string name = 1;
+ repeated HostPort servers = 4;
+ repeated TableName tables = 3;
+}
+
diff --git hbase-protocol/src/main/protobuf/GroupAdmin.proto hbase-protocol/src/main/protobuf/GroupAdmin.proto
new file mode 100644
index 0000000..b6a1feb
--- /dev/null
+++ hbase-protocol/src/main/protobuf/GroupAdmin.proto
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "GroupAdminProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "HBase.proto";
+import "Group.proto";
+
+/** Group level protobufs */
+
+message ListTablesOfGroupRequest {
+ required string group_name = 1;
+}
+
+message ListTablesOfGroupResponse {
+ repeated TableName table_name = 1;
+}
+
+message GetGroupInfoRequest {
+ required string group_name = 1;
+}
+
+message GetGroupInfoResponse {
+ optional GroupInfo group_info = 1;
+}
+
+message GetGroupInfoOfTableRequest {
+ required TableName table_name = 1;
+}
+
+message GetGroupInfoOfTableResponse {
+ optional GroupInfo group_info = 1;
+}
+
+message MoveServersRequest {
+ required string target_group = 1;
+ repeated HostPort servers = 2;
+}
+
+message MoveServersResponse {
+}
+
+message MoveTablesRequest {
+ required string target_group = 1;
+ repeated TableName table_name = 2;
+}
+
+message MoveTablesResponse {
+}
+
+message AddGroupRequest {
+ required string group_name = 1;
+}
+
+message AddGroupResponse {
+}
+
+message RemoveGroupRequest {
+ required string group_name = 1;
+}
+
+message RemoveGroupResponse {
+}
+
+message BalanceGroupRequest {
+ required string group_name = 1;
+}
+
+message BalanceGroupResponse {
+ required bool balanceRan = 1;
+}
+
+message ListGroupInfosRequest {
+}
+
+message ListGroupInfosResponse {
+ repeated GroupInfo group_info = 1;
+}
+
+message GetGroupInfoOfServerRequest {
+ required HostPort server = 1;
+}
+
+message GetGroupInfoOfServerResponse {
+ optional GroupInfo group_info = 1;
+}
+
+service GroupAdminService {
+ rpc GetGroupInfo(GetGroupInfoRequest)
+ returns (GetGroupInfoResponse);
+
+ rpc GetGroupInfoOfTable(GetGroupInfoOfTableRequest)
+ returns (GetGroupInfoOfTableResponse);
+
+ rpc GetGroupInfoOfServer(GetGroupInfoOfServerRequest)
+ returns (GetGroupInfoOfServerResponse);
+
+ rpc MoveServers(MoveServersRequest)
+ returns (MoveServersResponse);
+
+ rpc MoveTables(MoveTablesRequest)
+ returns (MoveTablesResponse);
+
+ rpc AddGroup(AddGroupRequest)
+ returns (AddGroupResponse);
+
+ rpc RemoveGroup(RemoveGroupRequest)
+ returns (RemoveGroupResponse);
+
+ rpc BalanceGroup(BalanceGroupRequest)
+ returns (BalanceGroupResponse);
+
+ rpc ListGroupInfos(ListGroupInfosRequest)
+ returns (ListGroupInfosResponse);
+}
diff --git hbase-protocol/src/main/protobuf/HBase.proto hbase-protocol/src/main/protobuf/HBase.proto
index f78163e..33fa7b9 100644
--- hbase-protocol/src/main/protobuf/HBase.proto
+++ hbase-protocol/src/main/protobuf/HBase.proto
@@ -218,3 +218,9 @@ message NamespaceDescriptor {
message RegionServerInfo {
optional int32 infoPort = 1;
}
+
+message HostPort {
+ required string host_name = 1;
+ required uint32 port = 2;
+}
+
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
index d1045a2..9f02fe8 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
@@ -19,22 +19,25 @@
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import java.io.IOException;
-import java.util.List;
+import com.google.common.net.HostAndPort;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
@@ -391,7 +394,7 @@ public abstract class BaseMasterAndRegionObserver extends BaseRegionObserver
final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
throws IOException {
}
-
+
@Override
public void preListSnapshot(final ObserverContext ctx,
final SnapshotDescription snapshot) throws IOException {
@@ -478,7 +481,7 @@ public abstract class BaseMasterAndRegionObserver extends BaseRegionObserver
public void postTableFlush(ObserverContext ctx,
TableName tableName) throws IOException {
}
-
+
@Override
public void preSetUserQuota(final ObserverContext ctx,
final String userName, final Quotas quotas) throws IOException {
@@ -528,4 +531,54 @@ public abstract class BaseMasterAndRegionObserver extends BaseRegionObserver
public void postSetNamespaceQuota(final ObserverContext ctx,
final String namespace, final Quotas quotas) throws IOException {
}
+
+  @Override
+  public void postAddGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void postBalanceGroup(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      String groupName, boolean balancerRan) throws IOException {
+  }
+
+  @Override
+  public void postMoveServers(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void postMoveTables(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<TableName> tables, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void postRemoveGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void preAddGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void preBalanceGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String groupName)
+      throws IOException {
+  }
+
+  @Override
+  public void preMoveServers(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void preMoveTables(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<TableName> tables, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void preRemoveGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
index b1f6f4b..ed9ad5a 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
@@ -19,22 +19,25 @@
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import java.io.IOException;
-import java.util.List;
+import com.google.common.net.HostAndPort;
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.CONFIG})
@InterfaceStability.Evolving
@@ -394,7 +397,7 @@ public class BaseMasterObserver implements MasterObserver {
public void postListSnapshot(final ObserverContext ctx,
final SnapshotDescription snapshot) throws IOException {
}
-
+
@Override
public void preCloneSnapshot(final ObserverContext ctx,
final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
@@ -522,4 +525,55 @@ public class BaseMasterObserver implements MasterObserver {
public void postSetNamespaceQuota(final ObserverContext ctx,
final String namespace, final Quotas quotas) throws IOException {
}
+
+  @Override
+  public void preMoveServers(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void postMoveServers(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void preMoveTables(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<TableName> tables, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void postMoveTables(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<TableName> tables, String targetGroup) throws IOException {
+  }
+
+  @Override
+  public void preAddGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void postAddGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void preRemoveGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+
+  }
+
+  @Override
+  public void postRemoveGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String name)
+      throws IOException {
+  }
+
+  @Override
+  public void preBalanceGroup(ObserverContext<MasterCoprocessorEnvironment> ctx, String groupName)
+      throws IOException {
+  }
+
+  @Override
+  public void postBalanceGroup(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      String groupName, boolean balancerRan) throws IOException {
+  }
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index 1136cd0..fbde269 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -21,21 +21,24 @@ package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
import java.util.List;
+import java.util.Set;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import com.google.common.net.HostAndPort;
+
/**
* Defines coprocessor hooks for interacting with operations on the
* {@link org.apache.hadoop.hbase.master.HMaster} process.
@@ -616,7 +619,7 @@ public interface MasterObserver extends Coprocessor {
*/
void postListSnapshot(final ObserverContext ctx,
final SnapshotDescription snapshot) throws IOException;
-
+
/**
* Called before a snapshot is cloned.
* Called as part of restoreSnapshot RPC call.
@@ -863,7 +866,7 @@ public interface MasterObserver extends Coprocessor {
*/
void postTableFlush(final ObserverContext ctx,
final TableName tableName) throws IOException;
-
+
/**
* Called before the quota for the user is stored.
* @param ctx the environment to interact with the framework and master
@@ -967,4 +970,98 @@ public interface MasterObserver extends Coprocessor {
*/
void postSetNamespaceQuota(final ObserverContext ctx,
final String namespace, final Quotas quotas) throws IOException;
+
+  /**
+   * Called before servers are moved to target region server group
+   * @param ctx the environment to interact with the framework and master
+   * @param servers set of servers to move
+   * @param targetGroup name of the destination group
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  void preMoveServers(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException;
+
+  /**
+   * Called after servers are moved to target region server group
+   * @param ctx the environment to interact with the framework and master
+   * @param servers set of servers that were moved
+   * @param targetGroup name of the destination group
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  void postMoveServers(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+      Set<HostAndPort> servers, String targetGroup) throws IOException;
+
+ /**
+ * Called before tables are moved to target region server group
+ * @param ctx the environment to interact with the framework and master
+ * @param tables
+ * @param targetGroup
+ * @throws IOException
+ */
+ void preMoveTables(final ObserverContext ctx,
+ Set