diff --git src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java index 0129ee9..238740a 100644 --- src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java +++ src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java @@ -31,18 +31,15 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Writables; -import org.apache.hadoop.ipc.RemoteException; /** * Reads region and assignment information from .META.. @@ -253,19 +250,6 @@ public class MetaReader { } /** - * Reads the location of META from ROOT. - * @param metaServer connection to server hosting ROOT - * @return location of META in ROOT where location, or null if not available - * @throws IOException - * @deprecated Does not retry; use #getMetaRegionLocation(CatalogTracker) - */ - public static ServerName readMetaLocation(HRegionInterface metaServer) - throws IOException { - return readLocation(metaServer, CatalogTracker.ROOT_REGION_NAME, - CatalogTracker.META_REGION_NAME); - } - - /** * Gets the location of .META. region by reading content of * -ROOT-. * @param ct @@ -292,50 +276,6 @@ public class MetaReader { return (pair == null || pair.getSecond() == null)? null: pair.getSecond(); } - // TODO: Remove when deprecated dependencies are removed. 
- private static ServerName readLocation(HRegionInterface metaServer, - byte [] catalogRegionName, byte [] regionName) - throws IOException { - Result r = null; - try { - r = metaServer.get(catalogRegionName, - new Get(regionName). - addColumn(HConstants.CATALOG_FAMILY, - HConstants.SERVER_QUALIFIER). - addColumn(HConstants.CATALOG_FAMILY, - HConstants.STARTCODE_QUALIFIER)); - } catch (java.net.SocketTimeoutException e) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } catch (java.net.SocketException e) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } catch (RemoteException re) { - IOException ioe = re.unwrapRemoteException(); - if (ioe instanceof NotServingRegionException) { - // Treat this NSRE as unavailable table. Catch and fall through to - // return null below - } else if (ioe.getMessage().contains("Server not running")) { - // Treat as unavailable table. - } else { - throw re; - } - } catch (IOException e) { - if (e.getCause() != null && e.getCause() instanceof IOException && - e.getCause().getMessage() != null && - e.getCause().getMessage().contains("Connection reset by peer")) { - // Treat this exception + message as unavailable catalog table. Catch it - // and fall through to return a null - } else { - throw e; - } - } - if (r == null || r.isEmpty()) { - return null; - } - return getServerNameFromCatalogResult(r); - } - /** * Gets the region info and assignment for the specified region. * @param catalogTracker @@ -655,35 +595,6 @@ public class MetaReader { } /** - * Fully scan a given region, on a given server starting with given row. - * @param hRegionInterface region server - * @param visitor visitor - * @param regionName name of region - * @param startrow start row - * @throws IOException - * @deprecated Does not retry; use fullScan xxx instead. 
- x - */ - public static void fullScan(HRegionInterface hRegionInterface, - Visitor visitor, final byte[] regionName, - byte[] startrow) throws IOException { - if (hRegionInterface == null) return; - Scan scan = new Scan(); - if (startrow != null) scan.setStartRow(startrow); - scan.addFamily(HConstants.CATALOG_FAMILY); - long scannerid = hRegionInterface.openScanner(regionName, scan); - try { - Result data; - while((data = hRegionInterface.next(scannerid)) != null) { - if (!data.isEmpty()) visitor.visit(data); - } - } finally { - hRegionInterface.close(scannerid); - } - return; - } - - /** * Performs a full scan of a catalog table. * @param catalogTracker * @param visitor Visitor invoked against each row. diff --git src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 3167f23..2afdb06 100644 --- src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -307,6 +307,7 @@ public class ClientScanner extends AbstractClientScanner { } // Clear region this.currentRegion = null; + callable = null; continue; } long currentTime = System.currentTimeMillis(); diff --git src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 16e4017..ee16e72 100644 --- src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -19,6 +19,17 @@ */ package org.apache.hadoop.hbase.client; +import java.io.Closeable; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.net.SocketTimeoutException; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Pattern; + import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -45,6 +56,12 @@ import org.apache.hadoop.hbase.catalog.MetaReader; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -54,16 +71,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.KeeperException; -import java.io.Closeable; -import java.io.IOException; -import java.io.InterruptedIOException; -import java.net.SocketTimeoutException; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Pattern; +import com.google.protobuf.ServiceException; /** * Provides an interface to manage HBase database table metadata + general @@ -497,23 +505,27 @@ public class HBaseAdmin implements Abortable, Closeable { }); // Wait until all regions deleted - HRegionInterface server = - connection.getHRegionConnection(firstMetaServer.getHostname(), firstMetaServer.getPort()); + ClientProtocol server = + connection.getClient(firstMetaServer.getHostname(), firstMetaServer.getPort()); for (int tries = 0; tries < (this.numRetries * this.retryLongerMultiplier); tries++) { - long scannerId = -1L; try { Scan scan = 
MetaReader.getScanForTableName(tableName); - scan.addColumn(HConstants.CATALOG_FAMILY, - HConstants.REGIONINFO_QUALIFIER); - scannerId = server.openScanner( - firstMetaServer.getRegionInfo().getRegionName(), scan); + scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER); + ScanRequest request = RequestConverter.buildScanRequest( + firstMetaServer.getRegionInfo().getRegionName(), scan, 1, true); + Result[] values = null; // Get a batch at a time. - Result values = server.next(scannerId); + try { + ScanResponse response = server.scan(null, request); + values = ResponseConverter.getResults(response); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } // let us wait until .META. table is updated and // HMaster removes the table from its HTableDescriptors - if (values == null) { + if (values == null || values.length == 0) { boolean tableExists = false; HTableDescriptor[] htds; MasterKeepAliveConnection master = connection.getKeepAliveMaster(); @@ -542,14 +554,6 @@ public class HBaseAdmin implements Abortable, Closeable { throw ex; } } - } finally { - if (scannerId != -1L) { - try { - server.close(scannerId); - } catch (IOException ex) { - LOG.warn(ex); - } - } } try { Thread.sleep(getPauseTime(tries)); diff --git src/main/java/org/apache/hadoop/hbase/client/HConnection.java src/main/java/org/apache/hadoop/hbase/client/HConnection.java index 5d43086..23f8e5a 100644 --- src/main/java/org/apache/hadoop/hbase/client/HConnection.java +++ src/main/java/org/apache/hadoop/hbase/client/HConnection.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; /** @@ -221,6 +222,19 @@ public interface HConnection extends Abortable, 
Closeable { throws IOException; /** + * Establishes a connection to the region server at the specified address, and return + * a region client protocol. + * + * @param hostname RegionServer hostname + * @param port RegionServer port + * @return ClientProtocol proxy for RegionServer + * @throws IOException if a remote or network exception occurs + * + */ + public ClientProtocol getClient(final String hostname, final int port) + throws IOException; + + /** * Establishes a connection to the region server at the specified address. * @param regionServer - the server to connect to * @param getMaster - do we check if master is alive diff --git src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java index 0b783ce..820e2a9 100644 --- src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java +++ src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java @@ -19,6 +19,33 @@ */ package org.apache.hadoop.hbase.client; +import java.io.Closeable; +import java.io.IOException; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.lang.reflect.UndeclaredThrowableException; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import 
org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -46,6 +73,13 @@ import org.apache.hadoop.hbase.ipc.ExecRPCInvoker; import org.apache.hadoop.hbase.ipc.HBaseRPC; import org.apache.hadoop.hbase.ipc.HMasterInterface; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -61,32 +95,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.ipc.RemoteException; import org.apache.zookeeper.KeeperException; -import java.io.Closeable; -import java.io.IOException; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Proxy; -import java.lang.reflect.UndeclaredThrowableException; -import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.TreeMap; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicBoolean; 
-import java.util.concurrent.atomic.AtomicInteger; +import com.google.protobuf.ServiceException; /** * A non-instantiable class that manages {@link HConnection}s. @@ -146,6 +155,12 @@ public class HConnectionManager { public static final int MAX_CACHED_HBASE_INSTANCES; + /** Parameter name for what client protocol to use. */ + public static final String CLIENT_PROTOCOL_CLASS = "hbase.clientprotocol.class"; + + /** Default client protocol class name. */ + public static final String DEFAULT_CLIENT_PROTOCOL_CLASS = ClientProtocol.class.getName(); + private static final Log LOG = LogFactory.getLog(HConnectionManager.class); static { @@ -493,6 +508,7 @@ public class HConnectionManager { static class HConnectionImplementation implements HConnection, Closeable { static final Log LOG = LogFactory.getLog(HConnectionImplementation.class); private final Class serverInterfaceClass; + private final Class clientClass; private final long pause; private final int numRetries; private final int maxRPCAttempts; @@ -521,8 +537,8 @@ public class HConnectionManager { private final Configuration conf; // Known region HServerAddress.toString() -> HRegionInterface - private final Map servers = - new ConcurrentHashMap(); + private final ConcurrentHashMap> servers = + new ConcurrentHashMap>(); private final ConcurrentHashMap connectionLock = new ConcurrentHashMap(); @@ -570,6 +586,15 @@ public class HConnectionManager { throw new UnsupportedOperationException( "Unable to find region server interface " + serverClassName, e); } + String clientClassName = conf.get(CLIENT_PROTOCOL_CLASS, + DEFAULT_CLIENT_PROTOCOL_CLASS); + try { + this.clientClass = + (Class) Class.forName(clientClassName); + } catch (ClassNotFoundException e) { + throw new UnsupportedOperationException( + "Unable to find client protocol " + clientClassName, e); + } this.pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE, HConstants.DEFAULT_HBASE_CLIENT_PAUSE); this.numRetries = 
conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, @@ -1329,18 +1354,28 @@ public class HConnectionManager { } @Override + public ClientProtocol getClient( + final String hostname, final int port) throws IOException { + return (ClientProtocol)getProtocol(hostname, port, + clientClass, ClientProtocol.VERSION); + } + + @Override @Deprecated public HRegionInterface getHRegionConnection(HServerAddress hsa, boolean master) throws IOException { - return getHRegionConnection(null, -1, hsa.getInetSocketAddress(), master); + String hostname = hsa.getInetSocketAddress().getHostName(); + int port = hsa.getInetSocketAddress().getPort(); + return getHRegionConnection(hostname, port, master); } @Override public HRegionInterface getHRegionConnection(final String hostname, final int port, final boolean master) throws IOException { - return getHRegionConnection(hostname, port, null, master); + return (HRegionInterface)getProtocol(hostname, port, + serverInterfaceClass, HRegionInterface.VERSION); } /** @@ -1348,43 +1383,44 @@ public class HConnectionManager { * can be but not both. * @param hostname * @param port - * @param isa - * @param master + * @param protocolClass + * @param version * @return Proxy. 
* @throws IOException */ - HRegionInterface getHRegionConnection(final String hostname, final int port, - final InetSocketAddress isa, final boolean master) - throws IOException { - HRegionInterface server; - String rsName = null; - if (isa != null) { - rsName = Addressing.createHostAndPortStr(isa.getHostName(), - isa.getPort()); - } else { - rsName = Addressing.createHostAndPortStr(hostname, port); - } + VersionedProtocol getProtocol(final String hostname, + final int port, final Class protocolClass, + final long version) throws IOException { + String rsName = Addressing.createHostAndPortStr(hostname, port); // See if we already have a connection (common case) - server = this.servers.get(rsName); + Map protocols = this.servers.get(rsName); + if (protocols == null) { + protocols = new HashMap(); + Map existingProtocols = + this.servers.putIfAbsent(rsName, protocols); + if (existingProtocols != null) { + protocols = existingProtocols; + } + } + String protocol = protocolClass.getName(); + VersionedProtocol server = protocols.get(protocol); if (server == null) { - // create a unique lock for this RS (if necessary) - this.connectionLock.putIfAbsent(rsName, rsName); + // create a unique lock for this RS + protocol (if necessary) + String lockKey = protocol + "@" + rsName; + this.connectionLock.putIfAbsent(lockKey, lockKey); // get the RS lock - synchronized (this.connectionLock.get(rsName)) { + synchronized (this.connectionLock.get(lockKey)) { // do one more lookup in case we were stalled above - server = this.servers.get(rsName); + server = protocols.get(protocol); if (server == null) { try { // Only create isa when we need to. - InetSocketAddress address = isa != null? isa: - new InetSocketAddress(hostname, port); + InetSocketAddress address = new InetSocketAddress(hostname, port); // definitely a cache miss. 
establish an RPC for this RS - server = (HRegionInterface) HBaseRPC.waitForProxy( - serverInterfaceClass, HRegionInterface.VERSION, - address, this.conf, + server = HBaseRPC.waitForProxy( + protocolClass, version, address, this.conf, this.maxRPCAttempts, this.rpcTimeout, this.rpcTimeout); - this.servers.put(Addressing.createHostAndPortStr( - address.getHostName(), address.getPort()), server); + protocols.put(protocol, server); } catch (RemoteException e) { LOG.warn("RemoteException connecting to RS", e); // Throw what the RemoteException was carrying. @@ -1679,11 +1715,42 @@ public class HConnectionManager { ServerCallable callable = new ServerCallable(connection, tableName, null) { public MultiResponse call() throws IOException { - return server.multi(multi); + try { + MultiResponse response = new MultiResponse(); + for (Map.Entry>> e: multi.actions.entrySet()) { + byte[] regionName = e.getKey(); + int rowMutations = 0; + List> actions = e.getValue(); + for (Action action: actions) { + Row row = action.getAction(); + if (row instanceof RowMutations) { + MultiRequest request = + RequestConverter.buildMultiRequest(regionName, (RowMutations)row); + server.multi(null, request); + response.add(regionName, action.getOriginalIndex(), new Result()); + rowMutations++; + } + } + if (actions.size() > rowMutations) { + MultiRequest request = + RequestConverter.buildMultiRequest(regionName, actions); + ClientProtos.MultiResponse + proto = server.multi(null, request); + List results = ResponseConverter.getResults(proto); + for (int i = 0, n = results.size(); i < n; i++) { + int originalIndex = actions.get(i).getOriginalIndex(); + response.add(regionName, originalIndex, results.get(i)); + } + } + } + return response; + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } @Override public void connect(boolean reload) throws IOException { - server = connection.getHRegionConnection( + server = connection.getClient( loc.getHostname(), loc.getPort()); } 
}; @@ -1885,7 +1952,7 @@ public class HConnectionManager { } } } catch (ExecutionException e) { - LOG.warn("Failed all from " + loc, e); + LOG.debug("Failed all from " + loc, e); } } @@ -2077,8 +2144,10 @@ public class HConnectionManager { delayedClosing.stop("Closing connection"); if (stopProxy) { closeMaster(); - for (HRegionInterface i : servers.values()) { - HBaseRPC.stopProxy(i); + for (Map i : servers.values()) { + for (VersionedProtocol server: i.values()) { + HBaseRPC.stopProxy(server); + } } } closeZooKeeperWatcher(); diff --git src/main/java/org/apache/hadoop/hbase/client/HTable.java src/main/java/org/apache/hadoop/hbase/client/HTable.java index aa7652f..2c87d50 100644 --- src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -53,13 +53,27 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.ExecRPCInvoker; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Writables; +import com.google.protobuf.ServiceException; + /** *

Used to communicate with a single HBase table. * @@ -648,8 +662,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Result call() throws IOException { - return server.getClosestRowBefore(location.getRegionInfo().getRegionName(), - row, family); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), row, family, true); + GetResponse response = server.get(null, request); + if (!response.hasResult()) return null; + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -694,7 +715,14 @@ public class HTable implements HTableInterface { public Result get(final Get get) throws IOException { return new ServerCallable(connection, tableName, get.getRow(), operationTimeout) { public Result call() throws IOException { - return server.get(location.getRegionInfo().getRegionName(), get); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), get); + GetResponse response = server.get(null, request); + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -746,13 +774,18 @@ public class HTable implements HTableInterface { @Override public void delete(final Delete delete) throws IOException { - new ServerCallable(connection, tableName, delete.getRow(), - operationTimeout) { - public Void call() throws IOException { - server.delete(location.getRegionInfo().getRegionName(), delete); - return null; - } - }.withRetries(); + new ServerCallable(connection, tableName, delete.getRow(), operationTimeout) { + public Boolean call() throws IOException { + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), delete); + 
MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + } + }.withRetries(); } /** @@ -821,7 +854,13 @@ public class HTable implements HTableInterface { new ServerCallable(connection, tableName, rm.getRow(), operationTimeout) { public Void call() throws IOException { - server.mutateRow(location.getRegionInfo().getRegionName(), rm); + try { + MultiRequest request = RequestConverter.buildMultiRequest( + location.getRegionInfo().getRegionName(), rm); + server.multi(null, request); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } return null; } }.withRetries(); @@ -838,8 +877,15 @@ public class HTable implements HTableInterface { } return new ServerCallable(connection, tableName, append.getRow(), operationTimeout) { public Result call() throws IOException { - return server.append( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), append); + MutateResponse response = server.mutate(null, request); + if (!response.hasResult()) return null; + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -855,8 +901,14 @@ public class HTable implements HTableInterface { } return new ServerCallable(connection, tableName, increment.getRow(), operationTimeout) { public Result call() throws IOException { - return server.increment( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), increment); + MutateResponse response = server.mutate(null, request); + return ProtobufUtil.toResult(response.getResult()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -890,9 +942,16 @@ public class HTable implements HTableInterface { } return new 
ServerCallable(connection, tableName, row, operationTimeout) { public Long call() throws IOException { - return server.incrementColumnValue( + try { + MutateRequest request = RequestConverter.buildMutateRequest( location.getRegionInfo().getRegionName(), row, family, qualifier, amount, writeToWAL); + MutateResponse response = server.mutate(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); + return Long.valueOf(Bytes.toLong(result.getValue(family, qualifier))); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -907,8 +966,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Boolean call() throws IOException { - return server.checkAndPut(location.getRegionInfo().getRegionName(), - row, family, qualifier, value, put) ? Boolean.TRUE : Boolean.FALSE; + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), row, family, qualifier, + new BinaryComparator(value), CompareType.EQUAL, put); + MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -924,10 +990,15 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public Boolean call() throws IOException { - return server.checkAndDelete( - location.getRegionInfo().getRegionName(), - row, family, qualifier, value, delete) - ? 
Boolean.TRUE : Boolean.FALSE; + try { + MutateRequest request = RequestConverter.buildMutateRequest( + location.getRegionInfo().getRegionName(), row, family, qualifier, + new BinaryComparator(value), CompareType.EQUAL, delete); + MutateResponse response = server.mutate(null, request); + return Boolean.valueOf(response.getProcessed()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -939,8 +1010,14 @@ public class HTable implements HTableInterface { public boolean exists(final Get get) throws IOException { return new ServerCallable(connection, tableName, get.getRow(), operationTimeout) { public Boolean call() throws IOException { - return server. - exists(location.getRegionInfo().getRegionName(), get); + try { + GetRequest request = RequestConverter.buildGetRequest( + location.getRegionInfo().getRegionName(), get, true); + GetResponse response = server.get(null, request); + return response.getExists(); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -1026,9 +1103,14 @@ public class HTable implements HTableInterface { throws IOException { return new ServerCallable(connection, tableName, row, operationTimeout) { public RowLock call() throws IOException { - long lockId = - server.lockRow(location.getRegionInfo().getRegionName(), row); - return new RowLock(row,lockId); + try { + LockRowRequest request = RequestConverter.buildLockRowRequest( + location.getRegionInfo().getRegionName(), row); + LockRowResponse response = server.lockRow(null, request); + return new RowLock(row, response.getLockId()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } }.withRetries(); } @@ -1039,14 +1121,18 @@ public class HTable implements HTableInterface { @Override public void unlockRow(final RowLock rl) throws IOException { - new ServerCallable(connection, tableName, rl.getRow(), - operationTimeout) { - public Void call() throws 
IOException { - server.unlockRow(location.getRegionInfo().getRegionName(), rl - .getLockId()); - return null; - } - }.withRetries(); + new ServerCallable(connection, tableName, rl.getRow(), operationTimeout) { + public Boolean call() throws IOException { + try { + UnlockRowRequest request = RequestConverter.buildUnlockRowRequest( + location.getRegionInfo().getRegionName(), rl.getLockId()); + server.unlockRow(null, request); + return Boolean.TRUE; + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + } + }.withRetries(); } /** diff --git src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 9903df3..fe80fcf 100644 --- src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -25,15 +25,22 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RemoteExceptionHandler; +import org.apache.hadoop.hbase.client.metrics.ScanMetrics; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; +import com.google.protobuf.ServiceException; + /** * Retries scanner operations such 
as create, next, etc. * Used by {@link ResultScanner}s made by {@link HTable}. @@ -107,41 +114,58 @@ public class ScannerCallable extends ServerCallable { * @see java.util.concurrent.Callable#call() */ public Result [] call() throws IOException { - if (scannerId != -1L && closed) { - close(); - } else if (scannerId == -1L && !closed) { - this.scannerId = openScanner(); + if (closed) { + if (scannerId != -1) { + close(); + } } else { - Result [] rrs = null; - try { - incRPCcallsMetrics(); - rrs = server.next(scannerId, caching); - updateResultsMetrics(rrs); - } catch (IOException e) { - IOException ioe = null; - if (e instanceof RemoteException) { - ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e); - } - if (ioe == null) throw new IOException(e); - if (ioe instanceof NotServingRegionException) { - // Throw a DNRE so that we break out of cycle of calling NSRE - // when what we need is to open scanner against new location. - // Attach NSRE to signal client that it needs to resetup scanner. 
- if (this.scanMetrics != null) { - this.scanMetrics.countOfNSRE.inc(); + if (scannerId == -1L) { + this.scannerId = openScanner(); + } else { + Result [] rrs = null; + try { + incRPCcallsMetrics(); + ScanRequest request = + RequestConverter.buildScanRequest(scannerId, caching, false); + try { + ScanResponse response = server.scan(null, request); + rrs = ResponseConverter.getResults(response); + if (response.hasMoreResults() + && !response.getMoreResults()) { + scannerId = -1L; + closed = true; + return null; + } + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } + updateResultsMetrics(rrs); + } catch (IOException e) { + IOException ioe = null; + if (e instanceof RemoteException) { + ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e); + } + if (ioe == null) throw new IOException(e); + if (ioe instanceof NotServingRegionException) { + // Throw a DNRE so that we break out of cycle of calling NSRE + // when what we need is to open scanner against new location. + // Attach NSRE to signal client that it needs to resetup scanner. + if (this.scanMetrics != null) { + this.scanMetrics.countOfNSRE.inc(); + } + throw new DoNotRetryIOException("Reset scanner", ioe); + } else if (ioe instanceof RegionServerStoppedException) { + // Throw a DNRE so that we break out of cycle of calling RSSE + // when what we need is to open scanner against new location. + // Attach RSSE to signal client that it needs to resetup scanner. + throw new DoNotRetryIOException("Reset scanner", ioe); + } else { + // The outer layers will retry + throw ioe; } - throw new DoNotRetryIOException("Reset scanner", ioe); - } else if (ioe instanceof RegionServerStoppedException) { - // Throw a DNRE so that we break out of cycle of calling RSSE - // when what we need is to open scanner against new location. - // Attach RSSE to signal client that it needs to resetup scanner. 
- throw new DoNotRetryIOException("Reset scanner", ioe); - } else { - // The outer layers will retry - throw ioe; } + return rrs; } - return rrs; } return null; } @@ -161,10 +185,12 @@ public class ScannerCallable extends ServerCallable { return; } for (Result rr : rrs) { - this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength()); - if (isRegionServerRemote) { - this.scanMetrics.countOfBytesInRemoteResults.inc( - rr.getBytes().getLength()); + if (rr.getBytes() != null) { + this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength()); + if (isRegionServerRemote) { + this.scanMetrics.countOfBytesInRemoteResults.inc( + rr.getBytes().getLength()); + } } } } @@ -175,7 +201,13 @@ public class ScannerCallable extends ServerCallable { } try { incRPCcallsMetrics(); - this.server.close(this.scannerId); + ScanRequest request = + RequestConverter.buildScanRequest(this.scannerId, 0, true); + try { + server.scan(null, request); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } catch (IOException e) { LOG.warn("Ignore, probably already closed", e); } @@ -184,8 +216,16 @@ public class ScannerCallable extends ServerCallable { protected long openScanner() throws IOException { incRPCcallsMetrics(); - return this.server.openScanner(this.location.getRegionInfo().getRegionName(), - this.scan); + ScanRequest request = + RequestConverter.buildScanRequest( + this.location.getRegionInfo().getRegionName(), + this.scan, 0, false); + try { + ScanResponse response = server.scan(null, request); + return response.getScannerId(); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } } protected Scan getScan() { diff --git src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java index ddcf9ad..2a9d86e 100644 --- src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java +++ 
src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ipc.HBaseRPC; -import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.ipc.RemoteException; @@ -57,7 +57,7 @@ public abstract class ServerCallable implements Callable { protected final byte [] tableName; protected final byte [] row; protected HRegionLocation location; - protected HRegionInterface server; + protected ClientProtocol server; protected int callTimeout; protected long startTime, endTime; @@ -84,8 +84,8 @@ public abstract class ServerCallable implements Callable { */ public void connect(final boolean reload) throws IOException { this.location = connection.getRegionLocation(tableName, row, reload); - this.server = connection.getHRegionConnection(location.getHostname(), - location.getPort()); + this.server = connection.getClient(location.getHostname(), + location.getPort()); } /** @return the server name @@ -224,7 +224,7 @@ public abstract class ServerCallable implements Callable { } } - private static Throwable translateException(Throwable t) throws IOException { + protected static Throwable translateException(Throwable t) throws IOException { if (t instanceof UndeclaredThrowableException) { t = t.getCause(); } diff --git src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java index 1acbdab..a179bf3 100644 --- src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java +++ src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java @@ -19,13 +19,10 @@ */ package org.apache.hadoop.hbase.filter; +import java.nio.ByteBuffer; + import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hbase.ipc.HRegionInterface; -import org.apache.hadoop.hbase.util.Bytes; -import java.nio.ByteBuffer; -import java.util.HashMap; -import org.apache.hadoop.hbase.filter.*; /** * ParseConstants holds a bunch of constants related to parsing Filter Strings diff --git src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java index cbfa489..35b2c8b 100644 --- src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java +++ src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java @@ -100,6 +100,7 @@ import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; import com.google.protobuf.Message; +import com.google.protobuf.RpcController; /** * This is a customized version of the polymorphic hadoop @@ -268,6 +269,8 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur GENERIC_ARRAY_CODE = code++; addToMap(Array.class, GENERIC_ARRAY_CODE); + addToMap(RpcController.class, code++); + // make sure that this is the last statement in this static block NEXT_CLASS_CODE = code; } @@ -357,7 +360,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur } } - static Integer getClassCode(final Class c) + public static Integer getClassCode(final Class c) throws IOException { Integer code = CLASS_TO_CODE.get(c); if (code == null ) { @@ -726,7 +729,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur * @return the instantiated Message instance * @throws IOException if an IO problem occurs */ - private static Message tryInstantiateProtobuf( + public static Message tryInstantiateProtobuf( Class protoClass, DataInput dataIn) throws IOException { diff --git src/main/java/org/apache/hadoop/hbase/io/TimeRange.java src/main/java/org/apache/hadoop/hbase/io/TimeRange.java index d135393..5189c8c 100644 --- 
src/main/java/org/apache/hadoop/hbase/io/TimeRange.java +++ src/main/java/org/apache/hadoop/hbase/io/TimeRange.java @@ -110,6 +110,14 @@ public class TimeRange implements Writable { } /** + * Check if it is for all time + * @return true if it is for all time + */ + public boolean isAllTime() { + return allTime; + } + + /** * Check if the specified timestamp is within this TimeRange. *

* Returns true if within interval [minStamp, maxStamp), false diff --git src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java index 05ae717..d71e97e 100644 --- src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java +++ src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java @@ -19,18 +19,23 @@ */ package org.apache.hadoop.hbase.ipc; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.client.coprocessor.Exec; import org.apache.hadoop.hbase.client.coprocessor.ExecResult; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; import org.apache.hadoop.hbase.util.Bytes; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; - /** * Backs a {@link CoprocessorProtocol} subclass proxy and forwards method * invocations for server execution. 
Note that internally this will issue a @@ -74,8 +79,13 @@ public class ExecRPCInvoker implements InvocationHandler { ServerCallable callable = new ServerCallable(connection, table, row) { public ExecResult call() throws Exception { - return server.execCoprocessor(location.getRegionInfo().getRegionName(), - exec); + byte[] regionName = location.getRegionInfo().getRegionName(); + ExecCoprocessorRequest request = + RequestConverter.buildExecCoprocessorRequest(regionName, exec); + ExecCoprocessorResponse response = + server.execCoprocessor(null, request); + Object value = ProtobufUtil.toObject(response.getValue()); + return new ExecResult(regionName, value); } }; ExecResult result = callable.withRetries(); diff --git src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java index f1f06b0..b7afa58 100644 --- src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java +++ src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java @@ -19,19 +19,23 @@ */ package org.apache.hadoop.hbase.ipc; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.HbaseObjectWritable; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.io.VersionMismatchException; import org.apache.hadoop.io.VersionedWritable; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.Method; - /** A method invocation, including the method name and its parameters.*/ @InterfaceAudience.Private public class Invocation extends VersionedWritable 
implements Configurable { @@ -43,6 +47,17 @@ public class Invocation extends VersionedWritable implements Configurable { private long clientVersion; private int clientMethodsHash; + + // For generated protocol classes which don't have VERSION field, + // such as protobuf interfaces. + private static final Map, Long> + PROTOCOL_VERSION = new HashMap, Long>(); + + static { + PROTOCOL_VERSION.put(ClientService.BlockingInterface.class, + Long.valueOf(ClientProtocol.VERSION)); + } + private static byte RPC_VERSION = 1; public Invocation() {} @@ -51,22 +66,28 @@ public class Invocation extends VersionedWritable implements Configurable { this.methodName = method.getName(); this.parameterClasses = method.getParameterTypes(); this.parameters = parameters; - if (method.getDeclaringClass().equals(VersionedProtocol.class)) { + Class declaringClass = method.getDeclaringClass(); + if (declaringClass.equals(VersionedProtocol.class)) { //VersionedProtocol is exempted from version check. clientVersion = 0; clientMethodsHash = 0; } else { try { - Field versionField = method.getDeclaringClass().getField("VERSION"); - versionField.setAccessible(true); - this.clientVersion = versionField.getLong(method.getDeclaringClass()); + Long version = PROTOCOL_VERSION.get(declaringClass); + if (version != null) { + this.clientVersion = version.longValue(); + } else { + Field versionField = declaringClass.getField("VERSION"); + versionField.setAccessible(true); + this.clientVersion = versionField.getLong(declaringClass); + } } catch (NoSuchFieldException ex) { - throw new RuntimeException("The " + method.getDeclaringClass(), ex); + throw new RuntimeException("The " + declaringClass, ex); } catch (IllegalAccessException ex) { throw new RuntimeException(ex); } - this.clientMethodsHash = ProtocolSignature.getFingerprint(method - .getDeclaringClass().getMethods()); + this.clientMethodsHash = ProtocolSignature.getFingerprint( + declaringClass.getMethods()); } } diff --git 
src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java index 0573c68..9f159f2 100644 --- src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java +++ src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.io.HbaseObjectWritable; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Objects; @@ -52,6 +53,8 @@ import org.apache.hadoop.conf.*; import org.codehaus.jackson.map.ObjectMapper; +import com.google.protobuf.ServiceException; + /** An RpcEngine implementation for Writable data. */ @InterfaceAudience.Private class WritableRpcEngine implements RpcEngine { @@ -407,6 +410,9 @@ class WritableRpcEngine implements RpcEngine { if (target instanceof IOException) { throw (IOException)target; } + if (target instanceof ServiceException) { + throw ProtobufUtil.getRemoteException((ServiceException)target); + } IOException ioe = new IOException(target.toString()); ioe.setStackTrace(target.getStackTrace()); throw ioe; diff --git src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index b71ae66..d0570b9 100644 --- src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -68,10 +68,13 @@ import org.apache.hadoop.hbase.io.Reference.Range; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; +import 
org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; -import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; @@ -486,7 +489,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool { LOG.debug("Going to connect to server " + location + " for row " + Bytes.toStringBinary(row)); byte[] regionName = location.getRegionInfo().getRegionName(); - return server.bulkLoadHFiles(famPaths, regionName); + BulkLoadHFileRequest request = + RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName); + BulkLoadHFileResponse response = + server.bulkLoadHFile(null, request); + return response.getLoaded(); } }; diff --git src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java new file mode 100644 index 0000000..422e865 --- /dev/null +++ src/main/java/org/apache/hadoop/hbase/protobuf/AdminProtocol.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.protobuf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService; +import org.apache.hadoop.hbase.security.TokenInfo; +import org.apache.hadoop.security.KerberosInfo; + +/** + * Protocol that a HBase client uses to communicate with a region server. + */ +@KerberosInfo( + serverPrincipal = "hbase.regionserver.kerberos.principal") +@TokenInfo("HBASE_AUTH_TOKEN") +@InterfaceAudience.Private +public interface AdminProtocol extends + AdminService.BlockingInterface, VersionedProtocol { + public static final long VERSION = 1L; +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java new file mode 100644 index 0000000..3d6a23a --- /dev/null +++ src/main/java/org/apache/hadoop/hbase/protobuf/ClientProtocol.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.protobuf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.ipc.VersionedProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; +import org.apache.hadoop.hbase.security.TokenInfo; +import org.apache.hadoop.security.KerberosInfo; + +/** + * Protocol that a HBase client uses to communicate with a region server. + */ +@KerberosInfo( + serverPrincipal = "hbase.regionserver.kerberos.principal") +@TokenInfo("HBASE_AUTH_TOKEN") +@InterfaceAudience.Public +@InterfaceStability.Evolving +public interface ClientProtocol extends + ClientService.BlockingInterface, VersionedProtocol { + public static final long VERSION = 1L; +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 2eb57de..b056830 100644 --- src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -17,12 +17,89 @@ */ package org.apache.hadoop.hbase.protobuf; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; 
+import java.util.TreeMap; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.RowLock; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.io.HbaseObjectWritable; +import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import com.google.protobuf.ByteString; +import com.google.protobuf.ServiceException; + /** * Protobufs utility. */ -public class ProtobufUtil { +public final class ProtobufUtil { + + private ProtobufUtil() { + } + + /** + * Primitive type to class mapping. + */ + private final static Map> + PRIMITIVES = new HashMap>(); + + static { + PRIMITIVES.put(Boolean.TYPE.getName(), Boolean.TYPE); + PRIMITIVES.put(Byte.TYPE.getName(), Byte.TYPE); + PRIMITIVES.put(Character.TYPE.getName(), Character.TYPE); + PRIMITIVES.put(Short.TYPE.getName(), Short.TYPE); + PRIMITIVES.put(Integer.TYPE.getName(), Integer.TYPE); + PRIMITIVES.put(Long.TYPE.getName(), Long.TYPE); + PRIMITIVES.put(Float.TYPE.getName(), Float.TYPE); + PRIMITIVES.put(Double.TYPE.getName(), Double.TYPE); + PRIMITIVES.put(Void.TYPE.getName(), Void.TYPE); + } + /** * Magic we put ahead of a serialized protobuf message. * For example, all znode content is protobuf messages with the below magic @@ -56,4 +133,592 @@ public class ProtobufUtil { public static int lengthOfPBMagic() { return PB_MAGIC.length; } + + /** + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. + * + * @param se ServiceException that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + public static IOException getRemoteException(ServiceException se) { + Throwable e = se.getCause(); + if (e == null) { + return new IOException(se); + } + return e instanceof IOException ? 
(IOException) e : new IOException(se); + } + + /** + * Convert a protocol buffer Exec to a client Exec + * + * @param proto the protocol buffer Exec to convert + * @return the converted client Exec + */ + @SuppressWarnings("unchecked") + public static Exec toExec( + final ClientProtos.Exec proto) throws IOException { + byte[] row = proto.getRow().toByteArray(); + String protocolName = proto.getProtocolName(); + String methodName = proto.getMethodName(); + List parameters = new ArrayList(); + Class protocol = null; + Method method = null; + try { + List> types = new ArrayList>(); + for (NameBytesPair parameter: proto.getParameterList()) { + String type = parameter.getName(); + Class declaredClass = PRIMITIVES.get(type); + if (declaredClass == null) { + declaredClass = Class.forName(parameter.getName()); + } + parameters.add(toObject(parameter)); + types.add(declaredClass); + } + Class [] parameterTypes = new Class [types.size()]; + types.toArray(parameterTypes); + protocol = (Class) + Class.forName(protocolName); + method = protocol.getMethod(methodName, parameterTypes); + } catch (NoSuchMethodException nsme) { + throw new IOException(nsme); + } catch (ClassNotFoundException cnfe) { + throw new IOException(cnfe); + } + Configuration conf = HBaseConfiguration.create(); + for (NameStringPair p: proto.getPropertyList()) { + conf.set(p.getName(), p.getValue()); + } + Object[] parameterObjects = new Object[parameters.size()]; + parameters.toArray(parameterObjects); + return new Exec(conf, row, protocol, + method, parameterObjects); + } + + /** + * Convert a ServerName to a protocol buffer ServerName + * + * @param serverName the ServerName to convert + * @return the converted protocol buffer ServerName + */ + public static HBaseProtos.ServerName + toServerName(final ServerName serverName) { + if (serverName == null) return null; + HBaseProtos.ServerName.Builder builder = + HBaseProtos.ServerName.newBuilder(); + builder.setHostName(serverName.getHostname()); + if 
(serverName.getPort() >= 0) { + builder.setPort(serverName.getPort()); + } + if (serverName.getStartcode() >= 0) { + builder.setStartCode(serverName.getStartcode()); + } + return builder.build(); + } + + /** + * Convert a RegionInfo to a HRegionInfo + * + * @param proto the RegionInfo to convert + * @return the converted HRegionInfo + */ + public static HRegionInfo + toRegionInfo(final RegionInfo proto) { + if (proto == null) return null; + byte[] tableName = proto.getTableName().toByteArray(); + long regionId = proto.getRegionId(); + byte[] startKey = null; + byte[] endKey = null; + if (proto.hasStartKey()) { + startKey = proto.getStartKey().toByteArray(); + } + if (proto.hasEndKey()) { + endKey = proto.getEndKey().toByteArray(); + } + + return new HRegionInfo(tableName, + startKey, endKey, false, regionId); + } + + /** + * Convert a HRegionInfo to a RegionInfo + * + * @param info the HRegionInfo to convert + * @return the converted RegionInfo + */ + public static RegionInfo + toRegionInfo(final HRegionInfo info) { + if (info == null) return null; + RegionInfo.Builder builder = RegionInfo.newBuilder(); + builder.setTableName(ByteString.copyFrom(info.getTableName())); + builder.setRegionId(info.getRegionId()); + if (info.getStartKey() != null) { + builder.setStartKey(ByteString.copyFrom(info.getStartKey())); + } + if (info.getEndKey() != null) { + builder.setEndKey(ByteString.copyFrom(info.getEndKey())); + } + return builder.build(); + } + + /** + * Convert a protocol buffer Get to a client Get + * + * @param get the protocol buffer Get to convert + * @return the converted client Get + * @throws IOException + */ + public static Get toGet( + final ClientProtos.Get proto) throws IOException { + if (proto == null) return null; + byte[] row = proto.getRow().toByteArray(); + RowLock rowLock = null; + if (proto.hasLockId()) { + rowLock = new RowLock(proto.getLockId()); + } + Get get = new Get(row, rowLock); + if (proto.hasCacheBlocks()) { + 
get.setCacheBlocks(proto.getCacheBlocks()); + } + if (proto.hasMaxVersions()) { + get.setMaxVersions(proto.getMaxVersions()); + } + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + get.setTimeRange(minStamp, maxStamp); + } + if (proto.hasFilter()) { + NameBytesPair filter = proto.getFilter(); + get.setFilter((Filter)toObject(filter)); + } + for (NameBytesPair attribute: proto.getAttributeList()) { + get.setAttribute(attribute.getName(), attribute.getValue().toByteArray()); + } + if (proto.getColumnCount() > 0) { + for (Column column: proto.getColumnList()) { + byte[] family = column.getFamily().toByteArray(); + if (column.getQualifierCount() > 0) { + for (ByteString qualifier: column.getQualifierList()) { + get.addColumn(family, qualifier.toByteArray()); + } + } else { + get.addFamily(family); + } + } + } + return get; + } + + /** + * Convert a protocol buffer Mutate to a Put + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Put + * @throws DoNotRetryIOException + */ + public static Put toPut( + final Mutate proto) throws DoNotRetryIOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.PUT : type.name(); + byte[] row = proto.getRow().toByteArray(); + long timestamp = HConstants.LATEST_TIMESTAMP; + if (proto.hasTimestamp()) { + timestamp = proto.getTimestamp(); + } + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + Put put = new Put(row, timestamp, lock); + put.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + put.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = 
column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifer value"); + } + byte[] value = qv.getValue().toByteArray(); + long ts = timestamp; + if (qv.hasTimestamp()) { + ts = qv.getTimestamp(); + } + put.add(family, qualifier, ts, value); + } + } + return put; + } + + /** + * Convert a protocol buffer Mutate to a Delete + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Delete + */ + public static Delete toDelete(final Mutate proto) { + MutateType type = proto.getMutateType(); + assert type == MutateType.DELETE : type.name(); + byte[] row = proto.getRow().toByteArray(); + long timestamp = HConstants.LATEST_TIMESTAMP; + if (proto.hasTimestamp()) { + timestamp = proto.getTimestamp(); + } + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + Delete delete = new Delete(row, timestamp, lock); + delete.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + delete.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + DeleteType deleteType = qv.getDeleteType(); + byte[] qualifier = null; + if (qv.hasQualifier()) { + qualifier = qv.getQualifier().toByteArray(); + } + long ts = HConstants.LATEST_TIMESTAMP; + if (qv.hasTimestamp()) { + ts = qv.getTimestamp(); + } + if (deleteType == DeleteType.DELETE_ONE_VERSION) { + delete.deleteColumn(family, qualifier, ts); + } else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) { + delete.deleteColumns(family, qualifier, ts); + } else { + delete.deleteFamily(family, ts); + } + } + } + return delete; + } + + /** + * Convert a protocol 
buffer Mutate to an Append + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Append + * @throws DoNotRetryIOException + */ + public static Append toAppend( + final Mutate proto) throws DoNotRetryIOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.APPEND : type.name(); + byte[] row = proto.getRow().toByteArray(); + Append append = new Append(row); + append.setWriteToWAL(proto.getWriteToWAL()); + for (NameBytesPair attribute: proto.getAttributeList()) { + append.setAttribute(attribute.getName(), + attribute.getValue().toByteArray()); + } + for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifer value"); + } + byte[] value = qv.getValue().toByteArray(); + append.add(family, qualifier, value); + } + } + return append; + } + + /** + * Convert a protocol buffer Mutate to an Increment + * + * @param proto the protocol buffer Mutate to convert + * @return the converted client Increment + * @throws IOException + */ + public static Increment toIncrement( + final Mutate proto) throws IOException { + MutateType type = proto.getMutateType(); + assert type == MutateType.INCREMENT : type.name(); + RowLock lock = null; + if (proto.hasLockId()) { + lock = new RowLock(proto.getLockId()); + } + byte[] row = proto.getRow().toByteArray(); + Increment increment = new Increment(row, lock); + increment.setWriteToWAL(proto.getWriteToWAL()); + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + increment.setTimeRange(minStamp, maxStamp); + } + 
for (ColumnValue column: proto.getColumnValueList()) { + byte[] family = column.getFamily().toByteArray(); + for (QualifierValue qv: column.getQualifierValueList()) { + byte[] qualifier = qv.getQualifier().toByteArray(); + if (!qv.hasValue()) { + throw new DoNotRetryIOException( + "Missing required field: qualifer value"); + } + long value = Bytes.toLong(qv.getValue().toByteArray()); + increment.addColumn(family, qualifier, value); + } + } + return increment; + } + + /** + * Convert a protocol buffer Scan to a client Scan + * + * @param proto the protocol buffer Scan to convert + * @return the converted client Scan + * @throws IOException + */ + public static Scan toScan( + final ClientProtos.Scan proto) throws IOException { + byte [] startRow = HConstants.EMPTY_START_ROW; + byte [] stopRow = HConstants.EMPTY_END_ROW; + if (proto.hasStartRow()) { + startRow = proto.getStartRow().toByteArray(); + } + if (proto.hasStopRow()) { + stopRow = proto.getStopRow().toByteArray(); + } + Scan scan = new Scan(startRow, stopRow); + if (proto.hasCacheBlocks()) { + scan.setCacheBlocks(proto.getCacheBlocks()); + } + if (proto.hasMaxVersions()) { + scan.setMaxVersions(proto.getMaxVersions()); + } + if (proto.hasTimeRange()) { + HBaseProtos.TimeRange timeRange = proto.getTimeRange(); + long minStamp = 0; + long maxStamp = Long.MAX_VALUE; + if (timeRange.hasFrom()) { + minStamp = timeRange.getFrom(); + } + if (timeRange.hasTo()) { + maxStamp = timeRange.getTo(); + } + scan.setTimeRange(minStamp, maxStamp); + } + if (proto.hasFilter()) { + NameBytesPair filter = proto.getFilter(); + scan.setFilter((Filter)toObject(filter)); + } + if (proto.hasBatchSize()) { + scan.setBatch(proto.getBatchSize()); + } + for (NameBytesPair attribute: proto.getAttributeList()) { + scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray()); + } + if (proto.getColumnCount() > 0) { + for (Column column: proto.getColumnList()) { + byte[] family = column.getFamily().toByteArray(); + if 
(column.getQualifierCount() > 0) { + for (ByteString qualifier: column.getQualifierList()) { + scan.addColumn(family, qualifier.toByteArray()); + } + } else { + scan.addFamily(family); + } + } + } + return scan; + } + + /** + * Convert a client Result to a protocol buffer Result + * + * @param result the client Result to convert + * @return the converted protocol buffer Result + */ + public static ClientProtos.Result toResult(final Result result) { + ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder(); + List protos = new ArrayList(); + List keyValues = result.list(); + if (keyValues != null) { + for (KeyValue keyValue: keyValues) { + ByteString value = ByteString.copyFrom(keyValue.getBuffer(), + keyValue.getOffset(), keyValue.getLength()); + protos.add(value); + } + } + builder.addAllKeyValueBytes(protos); + return builder.build(); + } + + /** + * Convert a protocol buffer Result to a client Result + * + * @param proto the protocol buffer Result to convert + * @return the converted client Result + */ + public static Result toResult(final ClientProtos.Result proto) { + List values = proto.getKeyValueBytesList(); + List keyValues = new ArrayList(values.size()); + for (ByteString value: values) { + keyValues.add(new KeyValue(value.toByteArray())); + } + return new Result(keyValues); + } + + /** + * Get the HLog entries from a list of protocol buffer WALEntry + * + * @param protoList the list of protocol buffer WALEntry + * @return an array of HLog entries + */ + public static HLog.Entry[] + toHLogEntries(final List protoList) { + List entries = new ArrayList(); + for (WALEntry entry: protoList) { + WALKey walKey = entry.getWalKey(); + java.util.UUID clusterId = HConstants.DEFAULT_CLUSTER_ID; + if (walKey.hasClusterId()) { + UUID protoUuid = walKey.getClusterId(); + clusterId = new java.util.UUID( + protoUuid.getMostSigBits(), protoUuid.getLeastSigBits()); + } + HLogKey key = new HLogKey(walKey.getEncodedRegionName().toByteArray(), + 
walKey.getTableName().toByteArray(), walKey.getLogSequenceNumber(), + walKey.getWriteTime(), clusterId); + WALEntry.WALEdit walEdit = entry.getEdit(); + WALEdit edit = new WALEdit(); + for (ByteString keyValue: walEdit.getKeyValueList()) { + edit.add(new KeyValue(keyValue.toByteArray())); + } + if (walEdit.getFamilyScopeCount() > 0) { + TreeMap scopes = new TreeMap(); + for (FamilyScope scope: walEdit.getFamilyScopeList()) { + scopes.put(scope.getFamily().toByteArray(), + Integer.valueOf(scope.getScopeType().ordinal())); + } + edit.setScopes(scopes); + } + entries.add(new HLog.Entry(key, edit)); + } + return entries.toArray(new HLog.Entry[entries.size()]); + } + + /** + * Convert a protocol buffer Parameter to a Java object + * + * @param parameter the protocol buffer Parameter to convert + * @return the converted Java object + * @throws IOException if failed to deserialize the parameter + */ + public static Object toObject( + final NameBytesPair parameter) throws IOException { + if (parameter == null || !parameter.hasValue()) return null; + byte[] bytes = parameter.getValue().toByteArray(); + ByteArrayInputStream bais = null; + try { + bais = new ByteArrayInputStream(bytes); + DataInput in = new DataInputStream(bais); + return HbaseObjectWritable.readObject(in, null); + } finally { + if (bais != null) { + bais.close(); + } + } + } + + /** + * Convert a stringified protocol buffer exception Parameter to a Java Exception + * + * @param parameter the protocol buffer Parameter to convert + * @return the converted Exception + * @throws IOException if failed to deserialize the parameter + */ + @SuppressWarnings("unchecked") + public static Throwable toException( + final NameBytesPair parameter) throws IOException { + if (parameter == null || !parameter.hasValue()) return null; + String desc = parameter.getValue().toStringUtf8(); + String type = parameter.getName(); + try { + Class c = + (Class)Class.forName(type); + Constructor cn = + 
c.getDeclaredConstructor(String.class); + return cn.newInstance(desc); + } catch (Exception e) { + throw new IOException(e); + } + } + + /** + * Serialize a Java Object into a Parameter. The Java Object should be a + * Writable or protocol buffer Message + * + * @param value the Writable/Message object to be serialized + * @return the converted protocol buffer Parameter + * @throws IOException if failed to serialize the object + */ + public static NameBytesPair toParameter( + final Object value) throws IOException { + Class declaredClass = Object.class; + if (value != null) { + declaredClass = value.getClass(); + } + return toParameter(declaredClass, value); + } + + /** + * Serialize a Java Object into a Parameter. The Java Object should be a + * Writable or protocol buffer Message + * + * @param declaredClass the declared class of the parameter + * @param value the Writable/Message object to be serialized + * @return the converted protocol buffer Parameter + * @throws IOException if failed to serialize the object + */ + public static NameBytesPair toParameter( + final Class declaredClass, final Object value) throws IOException { + NameBytesPair.Builder builder = NameBytesPair.newBuilder(); + builder.setName(declaredClass.getName()); + if (value != null) { + ByteArrayOutputStream baos = null; + try { + baos = new ByteArrayOutputStream(); + DataOutput out = new DataOutputStream(baos); + Class clz = declaredClass; + if (HbaseObjectWritable.getClassCode(declaredClass) == null) { + clz = value.getClass(); + } + HbaseObjectWritable.writeObject(out, value, clz, null); + builder.setValue( + ByteString.copyFrom(baos.toByteArray())); + } finally { + if (baos != null) { + baos.close(); + } + } + } + return builder.build(); + } } \ No newline at end of file diff --git src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java new file mode 100644 index 0000000..a912cc3 --- /dev/null +++ 
src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -0,0 +1,782 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.protobuf; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NavigableMap; +import java.util.NavigableSet; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Action; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; +import org.apache.hadoop.hbase.io.TimeRange; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; + +import com.google.protobuf.ByteString; +import 
com.google.protobuf.Message; + +/** + * Helper utility to build protocol buffer requests, + * or build components for protocol buffer requests. + */ +@InterfaceAudience.Private +public final class RequestConverter { + + private RequestConverter() { + } + +// Start utilities for Client + +/** + * Create a new protocol buffer GetRequest to get a row, all columns in a family. + * If there is no such row, return the closest row before it. + * + * @param regionName the name of the region to get + * @param row the row to get + * @param family the column family to get + * @param closestRowBefore if the requested row doesn't exist, + * should return the immediate row before + * @return a protocol buffer GetReuqest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final byte[] row, final byte[] family, boolean closestRowBefore) { + GetRequest.Builder builder = GetRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setClosestRowBefore(closestRowBefore); + builder.setRegion(region); + + Column.Builder columnBuilder = Column.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(family)); + ClientProtos.Get.Builder getBuilder = + ClientProtos.Get.newBuilder(); + getBuilder.setRow(ByteString.copyFrom(row)); + getBuilder.addColumn(columnBuilder.build()); + builder.setGet(getBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer GetRequest for a client Get + * + * @param regionName the name of the region to get + * @param get the client Get + * @return a protocol buffer GetReuqest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final Get get) throws IOException { + return buildGetRequest(regionName, get, false); + } + + /** + * Create a protocol buffer GetRequest for a client Get + * + * @param regionName the name of the region to get + * @param get the client Get + * @param existenceOnly indicate if check row 
existence only + * @return a protocol buffer GetReuqest + */ + public static GetRequest buildGetRequest(final byte[] regionName, + final Get get, final boolean existenceOnly) throws IOException { + GetRequest.Builder builder = GetRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setExistenceOnly(existenceOnly); + builder.setRegion(region); + builder.setGet(buildGet(get)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a client increment + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param amount + * @param writeToWAL + * @return a mutate request + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final long amount, final boolean writeToWAL) { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + + Mutate.Builder mutateBuilder = Mutate.newBuilder(); + mutateBuilder.setRow(ByteString.copyFrom(row)); + mutateBuilder.setMutateType(MutateType.INCREMENT); + mutateBuilder.setWriteToWAL(writeToWAL); + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + columnBuilder.setFamily(ByteString.copyFrom(family)); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + valueBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(amount))); + valueBuilder.setQualifier(ByteString.copyFrom(qualifier)); + columnBuilder.addQualifierValue(valueBuilder.build()); + mutateBuilder.addColumnValue(columnBuilder.build()); + + builder.setMutate(mutateBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a conditioned put + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + 
* @param put + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final WritableByteArrayComparable comparator, + final CompareType compareType, final Put put) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + Condition condition = buildCondition( + row, family, qualifier, comparator, compareType); + builder.setMutate(buildMutate(MutateType.PUT, put)); + builder.setCondition(condition); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a conditioned delete + * + * @param regionName + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + * @param delete + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final byte[] row, final byte[] family, + final byte [] qualifier, final WritableByteArrayComparable comparator, + final CompareType compareType, final Delete delete) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + Condition condition = buildCondition( + row, family, qualifier, comparator, compareType); + builder.setMutate(buildMutate(MutateType.DELETE, delete)); + builder.setCondition(condition); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a put + * + * @param regionName + * @param put + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Put put) throws IOException { + MutateRequest.Builder builder = 
MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.PUT, put)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for an append + * + * @param regionName + * @param append + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Append append) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.APPEND, append)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a client increment + * + * @param regionName + * @param increment + * @return a mutate request + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Increment increment) { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(increment)); + return builder.build(); + } + + /** + * Create a protocol buffer MutateRequest for a delete + * + * @param regionName + * @param delete + * @return a mutate request + * @throws IOException + */ + public static MutateRequest buildMutateRequest( + final byte[] regionName, final Delete delete) throws IOException { + MutateRequest.Builder builder = MutateRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setMutate(buildMutate(MutateType.DELETE, delete)); + return builder.build(); + } + + /** + * Create a protocol buffer MultiRequest for a row mutations + * + * @param regionName + * @param 
rowMutations + * @return a multi request + * @throws IOException + */ + public static MultiRequest buildMultiRequest(final byte[] regionName, + final RowMutations rowMutations) throws IOException { + MultiRequest.Builder builder = MultiRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setAtomic(true); + for (Mutation mutation: rowMutations.getMutations()) { + MutateType mutateType = null; + if (mutation instanceof Put) { + mutateType = MutateType.PUT; + } else if (mutation instanceof Delete) { + mutateType = MutateType.DELETE; + } else { + throw new DoNotRetryIOException( + "RowMutations supports only put and delete, not " + + mutation.getClass().getName()); + } + Mutate mutate = buildMutate(mutateType, mutation); + builder.addAction(ProtobufUtil.toParameter(mutate)); + } + return builder.build(); + } + + /** + * Create a protocol buffer ScanRequest for a client Scan + * + * @param regionName + * @param scan + * @param numberOfRows + * @param closeScanner + * @return a scan request + * @throws IOException + */ + public static ScanRequest buildScanRequest(final byte[] regionName, + final Scan scan, final int numberOfRows, + final boolean closeScanner) throws IOException { + ScanRequest.Builder builder = ScanRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setNumberOfRows(numberOfRows); + builder.setCloseScanner(closeScanner); + builder.setRegion(region); + + ClientProtos.Scan.Builder scanBuilder = + ClientProtos.Scan.newBuilder(); + scanBuilder.setCacheBlocks(scan.getCacheBlocks()); + scanBuilder.setBatchSize(scan.getBatch()); + scanBuilder.setMaxVersions(scan.getMaxVersions()); + TimeRange timeRange = scan.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + 
timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + scanBuilder.setTimeRange(timeRangeBuilder.build()); + } + Map attributes = scan.getAttributesMap(); + if (!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + scanBuilder.addAttribute(attributeBuilder.build()); + } + } + byte[] startRow = scan.getStartRow(); + if (startRow != null && startRow.length > 0) { + scanBuilder.setStartRow(ByteString.copyFrom(startRow)); + } + byte[] stopRow = scan.getStopRow(); + if (stopRow != null && stopRow.length > 0) { + scanBuilder.setStopRow(ByteString.copyFrom(stopRow)); + } + if (scan.hasFilter()) { + scanBuilder.setFilter(ProtobufUtil.toParameter(scan.getFilter())); + } + Column.Builder columnBuilder = Column.newBuilder(); + for (Map.Entry> + family: scan.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + NavigableSet columns = family.getValue(); + columnBuilder.clearQualifier(); + if (columns != null && columns.size() > 0) { + for (byte [] qualifier: family.getValue()) { + if (qualifier != null) { + columnBuilder.addQualifier(ByteString.copyFrom(qualifier)); + } + } + } + scanBuilder.addColumn(columnBuilder.build()); + } + builder.setScan(scanBuilder.build()); + return builder.build(); + } + + /** + * Create a protocol buffer ScanRequest for a scanner id + * + * @param scannerId + * @param numberOfRows + * @param closeScanner + * @return a scan request + */ + public static ScanRequest buildScanRequest(final long scannerId, + final int numberOfRows, final boolean closeScanner) { + ScanRequest.Builder builder = ScanRequest.newBuilder(); + builder.setNumberOfRows(numberOfRows); + builder.setCloseScanner(closeScanner); + builder.setScannerId(scannerId); + return builder.build(); + 
} + + /** + * Create a protocol buffer LockRowRequest + * + * @param regionName + * @param row + * @return a lock row request + */ + public static LockRowRequest buildLockRowRequest( + final byte[] regionName, final byte[] row) { + LockRowRequest.Builder builder = LockRowRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.addRow(ByteString.copyFrom(row)); + return builder.build(); + } + + /** + * Create a protocol buffer UnlockRowRequest + * + * @param regionName + * @param lockId + * @return a unlock row request + */ + public static UnlockRowRequest buildUnlockRowRequest( + final byte[] regionName, final long lockId) { + UnlockRowRequest.Builder builder = UnlockRowRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setLockId(lockId); + return builder.build(); + } + + /** + * Create a protocol buffer bulk load request + * + * @param familyPaths + * @param regionName + * @return a bulk load request + */ + public static BulkLoadHFileRequest buildBulkLoadHFileRequest( + final List> familyPaths, final byte[] regionName) { + BulkLoadHFileRequest.Builder builder = BulkLoadHFileRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + FamilyPath.Builder familyPathBuilder = FamilyPath.newBuilder(); + for (Pair familyPath: familyPaths) { + familyPathBuilder.setFamily(ByteString.copyFrom(familyPath.getFirst())); + familyPathBuilder.setPath(familyPath.getSecond()); + builder.addFamilyPath(familyPathBuilder.build()); + } + return builder.build(); + } + + /** + * Create a protocol buffer coprocessor exec request + * + * @param regionName + * @param exec + * @return a coprocessor exec request + * @throws IOException + */ + public static ExecCoprocessorRequest 
buildExecCoprocessorRequest( + final byte[] regionName, final Exec exec) throws IOException { + ExecCoprocessorRequest.Builder builder = ExecCoprocessorRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + builder.setCall(buildExec(exec)); + return builder.build(); + } + + /** + * Create a protocol buffer multi request for a list of actions. + * RowMutations in the list (if any) will be ignored. + * + * @param regionName + * @param actions + * @return a multi request + * @throws IOException + */ + public static MultiRequest buildMultiRequest(final byte[] regionName, + final List> actions) throws IOException { + MultiRequest.Builder builder = MultiRequest.newBuilder(); + RegionSpecifier region = buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, regionName); + builder.setRegion(region); + for (Action action: actions) { + Message protoAction = null; + Row row = action.getAction(); + if (row instanceof Get) { + protoAction = buildGet((Get)row); + } else if (row instanceof Put) { + protoAction = buildMutate(MutateType.PUT, (Put)row); + } else if (row instanceof Delete) { + protoAction = buildMutate(MutateType.DELETE, (Delete)row); + } else if (row instanceof Exec) { + protoAction = buildExec((Exec)row); + } else if (row instanceof Append) { + protoAction = buildMutate(MutateType.APPEND, (Append)row); + } else if (row instanceof Increment) { + protoAction = buildMutate((Increment)row); + } else if (row instanceof RowMutations) { + continue; // ignore RowMutations + } else { + throw new DoNotRetryIOException( + "multi doesn't support " + row.getClass().getName()); + } + builder.addAction(ProtobufUtil.toParameter(protoAction)); + } + return builder.build(); + } + +// End utilities for Client + + /** + * Create a protocol buffer Condition + * + * @param row + * @param family + * @param qualifier + * @param comparator + * @param compareType + * @return a Condition + * 
@throws IOException + */ + private static Condition buildCondition(final byte[] row, + final byte[] family, final byte [] qualifier, + final WritableByteArrayComparable comparator, + final CompareType compareType) throws IOException { + Condition.Builder builder = Condition.newBuilder(); + builder.setRow(ByteString.copyFrom(row)); + builder.setFamily(ByteString.copyFrom(family)); + builder.setQualifier(ByteString.copyFrom(qualifier)); + builder.setComparator(ProtobufUtil.toParameter(comparator)); + builder.setCompareType(compareType); + return builder.build(); + } + + /** + * Create a new protocol buffer Exec based on a client Exec + * + * @param exec + * @return + * @throws IOException + */ + private static ClientProtos.Exec buildExec( + final Exec exec) throws IOException { + ClientProtos.Exec.Builder + builder = ClientProtos.Exec.newBuilder(); + Configuration conf = exec.getConf(); + if (conf != null) { + NameStringPair.Builder propertyBuilder = NameStringPair.newBuilder(); + Iterator> iterator = conf.iterator(); + while (iterator.hasNext()) { + Entry entry = iterator.next(); + propertyBuilder.setName(entry.getKey()); + propertyBuilder.setValue(entry.getValue()); + builder.addProperty(propertyBuilder.build()); + } + } + builder.setProtocolName(exec.getProtocolName()); + builder.setMethodName(exec.getMethodName()); + builder.setRow(ByteString.copyFrom(exec.getRow())); + Object[] parameters = exec.getParameters(); + if (parameters != null && parameters.length > 0) { + Class[] declaredClasses = exec.getParameterClasses(); + for (int i = 0, n = parameters.length; i < n; i++) { + builder.addParameter( + ProtobufUtil.toParameter(declaredClasses[i], parameters[i])); + } + } + return builder.build(); + } + + /** + * Create a protocol buffer Get based on a client Get. 
+ * + * @param get the client Get + * @return a protocol buffer Get + * @throws IOException + */ + private static ClientProtos.Get buildGet( + final Get get) throws IOException { + ClientProtos.Get.Builder builder = + ClientProtos.Get.newBuilder(); + builder.setRow(ByteString.copyFrom(get.getRow())); + builder.setCacheBlocks(get.getCacheBlocks()); + builder.setMaxVersions(get.getMaxVersions()); + if (get.getLockId() >= 0) { + builder.setLockId(get.getLockId()); + } + if (get.getFilter() != null) { + builder.setFilter(ProtobufUtil.toParameter(get.getFilter())); + } + TimeRange timeRange = get.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + builder.setTimeRange(timeRangeBuilder.build()); + } + Map attributes = get.getAttributesMap(); + if (!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + builder.addAttribute(attributeBuilder.build()); + } + } + if (get.hasFamilies()) { + Column.Builder columnBuilder = Column.newBuilder(); + Map> families = get.getFamilyMap(); + for (Map.Entry> family: families.entrySet()) { + NavigableSet qualifiers = family.getValue(); + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifier(); + if (qualifiers != null && qualifiers.size() > 0) { + for (byte[] qualifier: qualifiers) { + if (qualifier != null) { + columnBuilder.addQualifier(ByteString.copyFrom(qualifier)); + } + } + } + builder.addColumn(columnBuilder.build()); + } + } + return builder.build(); + } + + private static Mutate buildMutate(final Increment increment) { + Mutate.Builder builder = Mutate.newBuilder(); + 
builder.setRow(ByteString.copyFrom(increment.getRow())); + builder.setMutateType(MutateType.INCREMENT); + builder.setWriteToWAL(increment.getWriteToWAL()); + if (increment.getLockId() >= 0) { + builder.setLockId(increment.getLockId()); + } + TimeRange timeRange = increment.getTimeRange(); + if (!timeRange.isAllTime()) { + HBaseProtos.TimeRange.Builder timeRangeBuilder = + HBaseProtos.TimeRange.newBuilder(); + timeRangeBuilder.setFrom(timeRange.getMin()); + timeRangeBuilder.setTo(timeRange.getMax()); + builder.setTimeRange(timeRangeBuilder.build()); + } + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + for (Map.Entry> + family: increment.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifierValue(); + NavigableMap values = family.getValue(); + if (values != null && values.size() > 0) { + for (Map.Entry value: values.entrySet()) { + valueBuilder.setQualifier(ByteString.copyFrom(value.getKey())); + valueBuilder.setValue(ByteString.copyFrom( + Bytes.toBytes(value.getValue().longValue()))); + columnBuilder.addQualifierValue(valueBuilder.build()); + } + } + builder.addColumnValue(columnBuilder.build()); + } + return builder.build(); + } + + /** + * Create a protocol buffer Mutate based on a client Mutation + * + * @param mutateType + * @param mutation + * @return a mutate + * @throws IOException + */ + private static Mutate buildMutate(final MutateType mutateType, + final Mutation mutation) throws IOException { + Mutate.Builder mutateBuilder = Mutate.newBuilder(); + mutateBuilder.setRow(ByteString.copyFrom(mutation.getRow())); + mutateBuilder.setMutateType(mutateType); + mutateBuilder.setWriteToWAL(mutation.getWriteToWAL()); + if (mutation.getLockId() >= 0) { + mutateBuilder.setLockId(mutation.getLockId()); + } + mutateBuilder.setTimestamp(mutation.getTimeStamp()); + Map attributes = mutation.getAttributesMap(); + if 
(!attributes.isEmpty()) { + NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); + for (Map.Entry attribute: attributes.entrySet()) { + attributeBuilder.setName(attribute.getKey()); + attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue())); + mutateBuilder.addAttribute(attributeBuilder.build()); + } + } + ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); + QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); + for (Map.Entry> + family: mutation.getFamilyMap().entrySet()) { + columnBuilder.setFamily(ByteString.copyFrom(family.getKey())); + columnBuilder.clearQualifierValue(); + for (KeyValue value: family.getValue()) { + valueBuilder.setQualifier(ByteString.copyFrom(value.getQualifier())); + valueBuilder.setValue(ByteString.copyFrom(value.getValue())); + valueBuilder.setTimestamp(value.getTimestamp()); + if (mutateType == MutateType.DELETE) { + KeyValue.Type keyValueType = KeyValue.Type.codeToType(value.getType()); + valueBuilder.setDeleteType(toDeleteType(keyValueType)); + } + columnBuilder.addQualifierValue(valueBuilder.build()); + } + mutateBuilder.addColumnValue(columnBuilder.build()); + } + return mutateBuilder.build(); + } + + /** + * Convert a byte array to a protocol buffer RegionSpecifier + * + * @param type the region specifier type + * @param value the region specifier byte array value + * @return a protocol buffer RegionSpecifier + */ + private static RegionSpecifier buildRegionSpecifier( + final RegionSpecifierType type, final byte[] value) { + RegionSpecifier.Builder regionBuilder = RegionSpecifier.newBuilder(); + regionBuilder.setValue(ByteString.copyFrom(value)); + regionBuilder.setType(type); + return regionBuilder.build(); + } + + /** + * Convert a delete KeyValue type to protocol buffer DeleteType. 
+ * + * @param type + * @return + * @throws IOException + */ + private static DeleteType toDeleteType( + KeyValue.Type type) throws IOException { + switch (type) { + case Delete: + return DeleteType.DELETE_ONE_VERSION; + case DeleteColumn: + return DeleteType.DELETE_MULTIPLE_VERSIONS; + case DeleteFamily: + return DeleteType.DELETE_FAMILY; + default: + throw new IOException("Unknown delete type: " + type); + } + } +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java new file mode 100644 index 0000000..ecaf9fe --- /dev/null +++ src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -0,0 +1,187 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.protobuf; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.regionserver.RegionOpeningState; +import org.apache.hadoop.util.StringUtils; + +import com.google.protobuf.ByteString; + +/** + * Helper utility to build protocol buffer responses, + * or retrieve data from protocol buffer responses. 
+ */ +@InterfaceAudience.Private +public final class ResponseConverter { + + private ResponseConverter() { + } + +// Start utilities for Client + + /** + * Get the client Results from a protocol buffer ScanResponse + * + * @param response the protocol buffer ScanResponse + * @return the client Results in the response + */ + public static Result[] getResults(final ScanResponse response) { + if (response == null) return null; + int count = response.getResultCount(); + Result[] results = new Result[count]; + for (int i = 0; i < count; i++) { + results[i] = ProtobufUtil.toResult(response.getResult(i)); + } + return results; + } + + /** + * Get the results from a protocol buffer MultiResponse + * + * @param proto the protocol buffer MultiResponse to convert + * @return the results in the MultiResponse + * @throws IOException + */ + public static List getResults( + final ClientProtos.MultiResponse proto) throws IOException { + List results = new ArrayList(); + List resultList = proto.getResultList(); + for (int i = 0, n = resultList.size(); i < n; i++) { + ActionResult result = resultList.get(i); + if (result.hasException()) { + results.add(ProtobufUtil.toException(result.getException())); + } else if (result.hasValue()) { + Object value = ProtobufUtil.toObject(result.getValue()); + if (value instanceof ClientProtos.Result) { + results.add(ProtobufUtil.toResult((ClientProtos.Result)value)); + } else { + results.add(value); + } + } else { + results.add(new Result()); + } + } + return results; + } + + /** + * Wrap a throwable to an action result. 
+ * + * @param t + * @return an action result + */ + public static ActionResult buildActionResult(final Throwable t) { + ActionResult.Builder builder = ActionResult.newBuilder(); + NameBytesPair.Builder parameterBuilder = NameBytesPair.newBuilder(); + parameterBuilder.setName(t.getClass().getName()); + parameterBuilder.setValue( + ByteString.copyFromUtf8(StringUtils.stringifyException(t))); + builder.setException(parameterBuilder.build()); + return builder.build(); + } + +// End utilities for Client +// Start utilities for Admin + + /** + * Get the list of regions to flush from a RollLogWriterResponse + * + * @param proto the RollLogWriterResponse + * @return the the list of regions to flush + */ + public static byte[][] getRegions(final RollWALWriterResponse proto) { + if (proto == null || proto.getRegionToFlushCount() == 0) return null; + List regions = new ArrayList(); + for (ByteString region: proto.getRegionToFlushList()) { + regions.add(region.toByteArray()); + } + return (byte[][])regions.toArray(); + } + + /** + * Get the list of region info from a GetOnlineRegionResponse + * + * @param proto the GetOnlineRegionResponse + * @return the list of region info + */ + public static List getRegionInfos + (final GetOnlineRegionResponse proto) { + if (proto == null || proto.getRegionInfoCount() == 0) return null; + List regionInfos = new ArrayList(); + for (RegionInfo regionInfo: proto.getRegionInfoList()) { + regionInfos.add(ProtobufUtil.toRegionInfo(regionInfo)); + } + return regionInfos; + } + + /** + * Get the region info from a GetRegionInfoResponse + * + * @param proto the GetRegionInfoResponse + * @return the region info + */ + public static HRegionInfo getRegionInfo + (final GetRegionInfoResponse proto) { + if (proto == null || proto.getRegionInfo() == null) return null; + return ProtobufUtil.toRegionInfo(proto.getRegionInfo()); + } + + /** + * Get the region opening state from a OpenRegionResponse + * + * @param proto the OpenRegionResponse + * @return the 
region opening state + */ + public static RegionOpeningState getRegionOpeningState + (final OpenRegionResponse proto) { + if (proto == null || proto.getOpeningStateCount() != 1) return null; + return RegionOpeningState.valueOf( + proto.getOpeningState(0).name()); + } + + /** + * Check if the region is closed from a CloseRegionResponse + * + * @param proto the CloseRegionResponse + * @return the region close state + */ + public static boolean isClosed + (final CloseRegionResponse proto) { + if (proto == null || !proto.hasClosed()) return false; + return proto.getClosed(); + } + +// End utilities for Admin +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java new file mode 100644 index 0000000..e78e56d --- /dev/null +++ src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -0,0 +1,15274 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Admin.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class AdminProtos { + private AdminProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface GetRegionInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + public static final class GetRegionInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoRequestOrBuilder { + // Use GetRegionInfoRequest.newBuilder() to construct. 
+ private GetRegionInfoRequest(Builder builder) { + super(builder); + } + private GetRegionInfoRequest(boolean noInit) {} + + private static final GetRegionInfoRequest defaultInstance; + public static GetRegionInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + 
getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, 
unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; 
+ return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + 
getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) + } + + static { + defaultInstance = new GetRegionInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) + } + + public interface GetRegionInfoResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionInfo regionInfo = 1; + boolean hasRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); + } + public static final class GetRegionInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoResponseOrBuilder { + // Use GetRegionInfoResponse.newBuilder() to construct. + private GetRegionInfoResponse(Builder builder) { + super(builder); + } + private GetRegionInfoResponse(boolean noInit) {} + + private static final GetRegionInfoResponse defaultInstance; + public static GetRegionInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 
0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + return regionInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + return regionInfo_; + } + + private void initFields() { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionInfo()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, regionInfo_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; + + boolean result = true; + result = result && (hasRegionInfo() == other.hasRegionInfo()); + if (hasRegionInfo()) { + result = result && getRegionInfo() + .equals(other.getRegionInfo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionInfo()) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionInfoBuilder_ == null) { + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; + if (other.hasRegionInfo()) { + mergeRegionInfo(other.getRegionInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionInfo()) { + + return false; + } + if (!getRegionInfo().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + if (hasRegionInfo()) { + subBuilder.mergeFrom(getRegionInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionInfo regionInfo = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + if (regionInfoBuilder_ == null) { + return regionInfo_; + } else { + return regionInfoBuilder_.getMessage(); + } + } + public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionInfo_ = value; + onChanged(); + } else { + regionInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + regionInfo_ = builderForValue.build(); + onChanged(); + } else { + regionInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { + regionInfo_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); + } else { + regionInfo_ = value; + } + onChanged(); + } else { + regionInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + 
bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilder(); + } else { + return regionInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) + } + + static { + defaultInstance = new GetRegionInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) + } + + public interface GetStoreFileListRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes columnFamily = 2; + java.util.List getColumnFamilyList(); + int getColumnFamilyCount(); + com.google.protobuf.ByteString 
getColumnFamily(int index); + } + public static final class GetStoreFileListRequest extends + com.google.protobuf.GeneratedMessage + implements GetStoreFileListRequestOrBuilder { + // Use GetStoreFileListRequest.newBuilder() to construct. + private GetStoreFileListRequest(Builder builder) { + super(builder); + } + private GetStoreFileListRequest(boolean noInit) {} + + private static final GetStoreFileListRequest defaultInstance; + public static GetStoreFileListRequest getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileListRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes columnFamily = 2; + public static final int COLUMNFAMILY_FIELD_NUMBER = 2; + private java.util.List columnFamily_; + public java.util.List + getColumnFamilyList() { + return columnFamily_; + } + public int getColumnFamilyCount() { + return columnFamily_.size(); + } + public com.google.protobuf.ByteString getColumnFamily(int index) { + return 
columnFamily_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + columnFamily_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < columnFamily_.size(); i++) { + output.writeBytes(2, columnFamily_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < columnFamily_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(columnFamily_.get(i)); + } + size += dataSize; + size += 1 * getColumnFamilyList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getColumnFamilyList() + .equals(other.getColumnFamilyList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getColumnFamilyCount() > 0) { + hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilyList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + 
} else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + columnFamily_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if 
(((bitField0_ & 0x00000002) == 0x00000002)) { + columnFamily_ = java.util.Collections.unmodifiableList(columnFamily_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.columnFamily_ = columnFamily_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.columnFamily_.isEmpty()) { + if (columnFamily_.isEmpty()) { + columnFamily_ = other.columnFamily_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColumnFamilyIsMutable(); + columnFamily_.addAll(other.columnFamily_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureColumnFamilyIsMutable(); + columnFamily_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + 
regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated bytes columnFamily = 2; + private java.util.List columnFamily_ = java.util.Collections.emptyList();; + private void ensureColumnFamilyIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + columnFamily_ = new java.util.ArrayList(columnFamily_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getColumnFamilyList() { + return java.util.Collections.unmodifiableList(columnFamily_); + } + public int getColumnFamilyCount() { + return columnFamily_.size(); + } + public com.google.protobuf.ByteString getColumnFamily(int index) { + return columnFamily_.get(index); + } + public Builder setColumnFamily( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnFamilyIsMutable(); + columnFamily_.set(index, value); + onChanged(); + return this; + } + public Builder addColumnFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnFamilyIsMutable(); + columnFamily_.add(value); + onChanged(); + return this; + } + public Builder addAllColumnFamily( + java.lang.Iterable values) { + ensureColumnFamilyIsMutable(); + super.addAll(values, columnFamily_); + onChanged(); + return this; + } + public Builder clearColumnFamily() { + columnFamily_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetStoreFileListRequest) + } + + static { + defaultInstance = new GetStoreFileListRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetStoreFileListRequest) + } + + public interface GetStoreFileListResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string 
storeFile = 1; + java.util.List getStoreFileList(); + int getStoreFileCount(); + String getStoreFile(int index); + } + public static final class GetStoreFileListResponse extends + com.google.protobuf.GeneratedMessage + implements GetStoreFileListResponseOrBuilder { + // Use GetStoreFileListResponse.newBuilder() to construct. + private GetStoreFileListResponse(Builder builder) { + super(builder); + } + private GetStoreFileListResponse(boolean noInit) {} + + private static final GetStoreFileListResponse defaultInstance; + public static GetStoreFileListResponse getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileListResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + } + + // repeated string storeFile = 1; + public static final int STOREFILE_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList storeFile_; + public java.util.List + getStoreFileList() { + return storeFile_; + } + public int getStoreFileCount() { + return storeFile_.size(); + } + public String getStoreFile(int index) { + return storeFile_.get(index); + } + + private void initFields() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i 
< storeFile_.size(); i++) { + output.writeBytes(1, storeFile_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < storeFile_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(storeFile_.getByteString(i)); + } + size += dataSize; + size += 1 * getStoreFileList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) obj; + + boolean result = true; + result = result && getStoreFileList() + .equals(other.getStoreFileList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getStoreFileCount() > 0) { + hash = (37 * hash) + STOREFILE_FIELD_NUMBER; + hash = (53 * hash) + getStoreFileList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return 
result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( + storeFile_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.storeFile_ = storeFile_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance()) return this; + if (!other.storeFile_.isEmpty()) { + if (storeFile_.isEmpty()) { + storeFile_ = other.storeFile_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureStoreFileIsMutable(); + storeFile_.addAll(other.storeFile_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); 
+ return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureStoreFileIsMutable(); + storeFile_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated string storeFile = 1; + private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureStoreFileIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getStoreFileList() { + return java.util.Collections.unmodifiableList(storeFile_); + } + public int getStoreFileCount() { + return storeFile_.size(); + } + public String getStoreFile(int index) { + return storeFile_.get(index); + } + public Builder setStoreFile( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.set(index, value); + onChanged(); + return this; + } + public Builder addStoreFile(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + return this; + } + public Builder addAllStoreFile( + java.lang.Iterable values) { + ensureStoreFileIsMutable(); + super.addAll(values, storeFile_); + onChanged(); + return this; + } + public Builder clearStoreFile() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + void addStoreFile(com.google.protobuf.ByteString value) { + ensureStoreFileIsMutable(); + storeFile_.add(value); + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:GetStoreFileListResponse) + } + + static { + defaultInstance = new GetStoreFileListResponse(true); + 
defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetStoreFileListResponse) + } + + public interface GetOnlineRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetOnlineRegionRequest extends + com.google.protobuf.GeneratedMessage + implements GetOnlineRegionRequestOrBuilder { + // Use GetOnlineRegionRequest.newBuilder() to construct. + private GetOnlineRegionRequest(Builder builder) { + super(builder); + } + private GetOnlineRegionRequest(boolean noInit) {} + + private static final GetOnlineRegionRequest defaultInstance; + public static GetOnlineRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public GetOnlineRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + 
@java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest) + } + + static { + defaultInstance = new GetOnlineRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest) + } + + public interface GetOnlineRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionInfo regionInfo = 1; + java.util.List + getRegionInfoList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index); + int getRegionInfoCount(); + java.util.List + getRegionInfoOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index); + } + public static final class GetOnlineRegionResponse extends + com.google.protobuf.GeneratedMessage + implements GetOnlineRegionResponseOrBuilder { + // Use GetOnlineRegionResponse.newBuilder() to construct. 
+ private GetOnlineRegionResponse(Builder builder) { + super(builder); + } + private GetOnlineRegionResponse(boolean noInit) {} + + private static final GetOnlineRegionResponse defaultInstance; + public static GetOnlineRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public GetOnlineRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + } + + // repeated .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private java.util.List regionInfo_; + public java.util.List getRegionInfoList() { + return regionInfo_; + } + public java.util.List + getRegionInfoOrBuilderList() { + return regionInfo_; + } + public int getRegionInfoCount() { + return regionInfo_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { + return regionInfo_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index) { + return regionInfo_.get(index); + } + + private void initFields() { + regionInfo_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionInfoCount(); i++) { + if (!getRegionInfo(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionInfo_.size(); i++) { + output.writeMessage(1, regionInfo_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < regionInfo_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj; + + boolean result = true; + result = result && getRegionInfoList() + .equals(other.getRegionInfoList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionInfoCount() > 0) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfoList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this); + int from_bitField0_ = bitField0_; + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; + if (regionInfoBuilder_ == null) { + if (!other.regionInfo_.isEmpty()) { + if (regionInfo_.isEmpty()) { + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionInfoIsMutable(); + regionInfo_.addAll(other.regionInfo_); + } + onChanged(); + } + } else { + if (!other.regionInfo_.isEmpty()) { + if (regionInfoBuilder_.isEmpty()) { + regionInfoBuilder_.dispose(); + regionInfoBuilder_ = null; + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + regionInfoBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRegionInfoFieldBuilder() : null; + } else { + regionInfoBuilder_.addAllMessages(other.regionInfo_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionInfoCount(); i++) { + if (!getRegionInfo(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegionInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .RegionInfo regionInfo = 1; + private java.util.List regionInfo_ = + java.util.Collections.emptyList(); + private void ensureRegionInfoIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = new java.util.ArrayList(regionInfo_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + + public java.util.List getRegionInfoList() { + 
if (regionInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionInfo_); + } else { + return regionInfoBuilder_.getMessageList(); + } + } + public int getRegionInfoCount() { + if (regionInfoBuilder_ == null) { + return regionInfo_.size(); + } else { + return regionInfoBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); + } else { + return regionInfoBuilder_.getMessage(index); + } + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.set(index, value); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.add(value); + onChanged(); + } else { + regionInfoBuilder_.addMessage(value); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.add(index, value); + onChanged(); + } else { + 
regionInfoBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegionInfo( + java.lang.Iterable values) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + super.addAll(values, regionInfo_); + onChanged(); + } else { + regionInfoBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + public Builder removeRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.remove(index); + onChanged(); + } else { + regionInfoBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); } else { + return regionInfoBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + 
getRegionInfoOrBuilderList() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionInfo_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { + return getRegionInfoFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public java.util.List + getRegionInfoBuilderList() { + return getRegionInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) + } + + static { + defaultInstance = new GetOnlineRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse) + } + + public interface OpenRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated 
.RegionSpecifier region = 1; + java.util.List + getRegionList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index); + int getRegionCount(); + java.util.List + getRegionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index); + + // optional uint32 versionOfOfflineNode = 2; + boolean hasVersionOfOfflineNode(); + int getVersionOfOfflineNode(); + } + public static final class OpenRegionRequest extends + com.google.protobuf.GeneratedMessage + implements OpenRegionRequestOrBuilder { + // Use OpenRegionRequest.newBuilder() to construct. + private OpenRegionRequest(Builder builder) { + super(builder); + } + private OpenRegionRequest(boolean noInit) {} + + private static final OpenRegionRequest defaultInstance; + public static OpenRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // repeated .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private java.util.List region_; + public java.util.List getRegionList() { + return region_; + } + public java.util.List + getRegionOrBuilderList() { + return region_; + } + public int getRegionCount() { + return region_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { + return region_.get(index); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index) { + return region_.get(index); + } + + // optional uint32 versionOfOfflineNode = 2; + public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2; + private int versionOfOfflineNode_; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + + private void initFields() { + region_ = java.util.Collections.emptyList(); + versionOfOfflineNode_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionCount(); i++) { + if (!getRegion(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < region_.size(); i++) { + output.writeMessage(1, region_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(2, versionOfOfflineNode_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < region_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfOfflineNode_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + 
throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; + + boolean result = true; + result = result && getRegionList() + .equals(other.getRegionList()); + result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); + if (hasVersionOfOfflineNode()) { + result = result && (getVersionOfOfflineNode() + == other.getVersionOfOfflineNode()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionCount() > 0) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegionList().hashCode(); + } + if (hasVersionOfOfflineNode()) { + hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfOfflineNode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + if (regionBuilder_ == null) { + region_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionBuilder_.clear(); + } + versionOfOfflineNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + region_ = java.util.Collections.unmodifiableList(region_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.region_ 
= region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.versionOfOfflineNode_ = versionOfOfflineNode_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this; + if (regionBuilder_ == null) { + if (!other.region_.isEmpty()) { + if (region_.isEmpty()) { + region_ = other.region_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionIsMutable(); + region_.addAll(other.region_); + } + onChanged(); + } + } else { + if (!other.region_.isEmpty()) { + if (regionBuilder_.isEmpty()) { + regionBuilder_.dispose(); + regionBuilder_ = null; + region_ = other.region_; + bitField0_ = (bitField0_ & ~0x00000001); + regionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRegionFieldBuilder() : null; + } else { + regionBuilder_.addAllMessages(other.region_); + } + } + } + if (other.hasVersionOfOfflineNode()) { + setVersionOfOfflineNode(other.getVersionOfOfflineNode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionCount(); i++) { + if (!getRegion(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .RegionSpecifier region = 1; + private java.util.List region_ = + java.util.Collections.emptyList(); + private void ensureRegionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + region_ = new java.util.ArrayList(region_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + + public java.util.List getRegionList() { + if (regionBuilder_ == null) { + return java.util.Collections.unmodifiableList(region_); + } else { + return regionBuilder_.getMessageList(); + } + } + public int getRegionCount() { + if (regionBuilder_ == null) { + return region_.size(); + } else { + return regionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { + if (regionBuilder_ == null) { + return region_.get(index); + } else { + return regionBuilder_.getMessage(index); + } + } + public Builder setRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionIsMutable(); + region_.set(index, value); + onChanged(); + } else { + regionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.set(index, builderForValue.build()); + onChanged(); + } else { + regionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionIsMutable(); + region_.add(value); + onChanged(); + } else { + regionBuilder_.addMessage(value); + } + return this; + } + public Builder addRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new 
NullPointerException(); + } + ensureRegionIsMutable(); + region_.add(index, value); + onChanged(); + } else { + regionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.add(builderForValue.build()); + onChanged(); + } else { + regionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.add(index, builderForValue.build()); + onChanged(); + } else { + regionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegion( + java.lang.Iterable values) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + super.addAll(values, region_); + onChanged(); + } else { + regionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionBuilder_.clear(); + } + return this; + } + public Builder removeRegion(int index) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.remove(index); + onChanged(); + } else { + regionBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder( + int index) { + return getRegionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index) { + if (regionBuilder_ == null) { + return region_.get(index); } else { + return regionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + 
getRegionOrBuilderList() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(region_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder() { + return getRegionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder( + int index) { + return getRegionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); + } + public java.util.List + getRegionBuilderList() { + return getRegionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfOfflineNode = 2; + private int versionOfOfflineNode_ ; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + public Builder setVersionOfOfflineNode(int value) { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = value; + 
onChanged(); + return this; + } + public Builder clearVersionOfOfflineNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfOfflineNode_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionRequest) + } + + static { + defaultInstance = new OpenRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionRequest) + } + + public interface OpenRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + java.util.List getOpeningStateList(); + int getOpeningStateCount(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); + } + public static final class OpenRegionResponse extends + com.google.protobuf.GeneratedMessage + implements OpenRegionResponseOrBuilder { + // Use OpenRegionResponse.newBuilder() to construct. + private OpenRegionResponse(Builder builder) { + super(builder); + } + private OpenRegionResponse(boolean noInit) {} + + private static final OpenRegionResponse defaultInstance; + public static OpenRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + public enum RegionOpeningState + implements com.google.protobuf.ProtocolMessageEnum { + OPENED(0, 0), + ALREADY_OPENED(1, 1), + FAILED_OPENING(2, 2), + ; + + public static final int OPENED_VALUE = 0; + public static final int 
ALREADY_OPENED_VALUE = 1; + public static final int FAILED_OPENING_VALUE = 2; + + + public final int getNumber() { return value; } + + public static RegionOpeningState valueOf(int value) { + switch (value) { + case 0: return OPENED; + case 1: return ALREADY_OPENED; + case 2: return FAILED_OPENING; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public RegionOpeningState findValueByNumber(int number) { + return RegionOpeningState.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final RegionOpeningState[] VALUES = { + OPENED, ALREADY_OPENED, FAILED_OPENING, + }; + + public static RegionOpeningState valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private RegionOpeningState(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) + } + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + public static final int OPENINGSTATE_FIELD_NUMBER = 1; + private java.util.List openingState_; + public 
java.util.List getOpeningStateList() { + return openingState_; + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + + private void initFields() { + openingState_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < openingState_.size(); i++) { + output.writeEnum(1, openingState_.get(i).getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < openingState_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(openingState_.get(i).getNumber()); + } + size += dataSize; + size += 1 * openingState_.size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj; + + boolean result = true; + result = result && getOpeningStateList() + .equals(other.getOpeningStateList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getOpeningStateCount() > 0) { + hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getOpeningStateList()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = java.util.Collections.unmodifiableList(openingState_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.openingState_ = openingState_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this; + if (!other.openingState_.isEmpty()) { + if (openingState_.isEmpty()) { + openingState_ = other.openingState_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureOpeningStateIsMutable(); + openingState_.addAll(other.openingState_); + } + onChanged(); + } 
+ this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + } + input.popLimit(oldLimit); + break; + } + } + } + } + + private int bitField0_; + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + private java.util.List openingState_ = + java.util.Collections.emptyList(); + private void ensureOpeningStateIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + 
openingState_ = new java.util.ArrayList(openingState_); + bitField0_ |= 0x00000001; + } + } + public java.util.List getOpeningStateList() { + return java.util.Collections.unmodifiableList(openingState_); + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + public Builder setOpeningState( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.set(index, value); + onChanged(); + return this; + } + public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.add(value); + onChanged(); + return this; + } + public Builder addAllOpeningState( + java.lang.Iterable values) { + ensureOpeningStateIsMutable(); + super.addAll(values, openingState_); + onChanged(); + return this; + } + public Builder clearOpeningState() { + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionResponse) + } + + static { + defaultInstance = new OpenRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionResponse) + } + + public interface CloseRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + 
// optional uint32 versionOfClosingNode = 2; + boolean hasVersionOfClosingNode(); + int getVersionOfClosingNode(); + + // optional bool transitionInZK = 3 [default = true]; + boolean hasTransitionInZK(); + boolean getTransitionInZK(); + } + public static final class CloseRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CloseRegionRequestOrBuilder { + // Use CloseRegionRequest.newBuilder() to construct. + private CloseRegionRequest(Builder builder) { + super(builder); + } + private CloseRegionRequest(boolean noInit) {} + + private static final CloseRegionRequest defaultInstance; + public static CloseRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint32 versionOfClosingNode = 2; + public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; + private int versionOfClosingNode_; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) 
== 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + + // optional bool transitionInZK = 3 [default = true]; + public static final int TRANSITIONINZK_FIELD_NUMBER = 3; + private boolean transitionInZK_; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return transitionInZK_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + versionOfClosingNode_ = 0; + transitionInZK_ = true; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, transitionInZK_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBoolSize(3, transitionInZK_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); + if (hasVersionOfClosingNode()) { + result = result && (getVersionOfClosingNode() + == other.getVersionOfClosingNode()); + } + result = result && (hasTransitionInZK() == other.hasTransitionInZK()); + if (hasTransitionInZK()) { + result = result && (getTransitionInZK() + == other.getTransitionInZK()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasVersionOfClosingNode()) { + hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfClosingNode(); + } + if (hasTransitionInZK()) { + hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getTransitionInZK()); + } + hash = (29 
* hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return 
builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + versionOfClosingNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + transitionInZK_ = true; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.versionOfClosingNode_ = versionOfClosingNode_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.transitionInZK_ = transitionInZK_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasVersionOfClosingNode()) { + 
setVersionOfClosingNode(other.getVersionOfClosingNode()); + } + if (other.hasTransitionInZK()) { + setTransitionInZK(other.getTransitionInZK()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + transitionInZK_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } 
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfClosingNode = 2; + private int versionOfClosingNode_ ; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + public Builder setVersionOfClosingNode(int value) { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = value; + onChanged(); + return this; + } + public Builder clearVersionOfClosingNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfClosingNode_ = 0; + onChanged(); + return this; + } + + // optional bool transitionInZK = 3 [default = true]; + private boolean transitionInZK_ = true; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return 
transitionInZK_; + } + public Builder setTransitionInZK(boolean value) { + bitField0_ |= 0x00000004; + transitionInZK_ = value; + onChanged(); + return this; + } + public Builder clearTransitionInZK() { + bitField0_ = (bitField0_ & ~0x00000004); + transitionInZK_ = true; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionRequest) + } + + static { + defaultInstance = new CloseRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionRequest) + } + + public interface CloseRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool closed = 1; + boolean hasClosed(); + boolean getClosed(); + } + public static final class CloseRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CloseRegionResponseOrBuilder { + // Use CloseRegionResponse.newBuilder() to construct. + private CloseRegionResponse(Builder builder) { + super(builder); + } + private CloseRegionResponse(boolean noInit) {} + + private static final CloseRegionResponse defaultInstance; + public static CloseRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool closed = 1; + public static final int CLOSED_FIELD_NUMBER = 1; + private boolean closed_; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + + 
private void initFields() { + closed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClosed()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, closed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, closed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; + + boolean result = true; + result = result && (hasClosed() == other.hasClosed()); + if (hasClosed()) { + result = result && (getClosed() + == other.getClosed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasClosed()) { + hash = (37 * hash) + CLOSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + 
return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + closed_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); 
+ if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.closed_ = closed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this; + if (other.hasClosed()) { + setClosed(other.getClosed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClosed()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, 
unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + closed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool closed = 1; + private boolean closed_ ; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + public Builder setClosed(boolean value) { + bitField0_ |= 0x00000001; + closed_ = value; + onChanged(); + return this; + } + public Builder clearClosed() { + bitField0_ = (bitField0_ & ~0x00000001); + closed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionResponse) + } + + static { + defaultInstance = new CloseRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionResponse) + } + + public interface FlushRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional uint64 ifOlderThanTs = 2; + boolean hasIfOlderThanTs(); + long getIfOlderThanTs(); + } + public static final class FlushRegionRequest extends + com.google.protobuf.GeneratedMessage + implements FlushRegionRequestOrBuilder { + // Use FlushRegionRequest.newBuilder() to construct. 
+ private FlushRegionRequest(Builder builder) { + super(builder); + } + private FlushRegionRequest(boolean noInit) {} + + private static final FlushRegionRequest defaultInstance; + public static FlushRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint64 ifOlderThanTs = 2; + public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; + private long ifOlderThanTs_; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + ifOlderThanTs_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + 
memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, ifOlderThanTs_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, ifOlderThanTs_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); + if (hasIfOlderThanTs()) { + result = result && 
(getIfOlderThanTs() + == other.getIfOlderThanTs()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasIfOlderThanTs()) { + hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getIfOlderThanTs()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + ifOlderThanTs_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ifOlderThanTs_ = ifOlderThanTs_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) { 
+ if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasIfOlderThanTs()) { + setIfOlderThanTs(other.getIfOlderThanTs()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + 
regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint64 ifOlderThanTs = 2; + private long ifOlderThanTs_ ; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + public Builder setIfOlderThanTs(long value) { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = value; + onChanged(); + return this; + } + public Builder clearIfOlderThanTs() { + bitField0_ = (bitField0_ & ~0x00000002); + ifOlderThanTs_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FlushRegionRequest) + } + + static { + defaultInstance = new FlushRegionRequest(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:FlushRegionRequest) + } + + public interface FlushRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lastFlushTime = 1; + boolean hasLastFlushTime(); + long getLastFlushTime(); + + // optional bool flushed = 2; + boolean hasFlushed(); + boolean getFlushed(); + } + public static final class FlushRegionResponse extends + com.google.protobuf.GeneratedMessage + implements FlushRegionResponseOrBuilder { + // Use FlushRegionResponse.newBuilder() to construct. + private FlushRegionResponse(Builder builder) { + super(builder); + } + private FlushRegionResponse(boolean noInit) {} + + private static final FlushRegionResponse defaultInstance; + public static FlushRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lastFlushTime = 1; + public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; + private long lastFlushTime_; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + + // optional bool flushed = 2; + public static final int FLUSHED_FIELD_NUMBER = 2; + private boolean flushed_; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + return flushed_; + } + + private void initFields() { + lastFlushTime_ = 0L; + flushed_ = false; + } + private 
byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLastFlushTime()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, flushed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, flushed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj; + + boolean result = true; + result = result && (hasLastFlushTime() == other.hasLastFlushTime()); + if (hasLastFlushTime()) { + result = result && (getLastFlushTime() + == 
other.getLastFlushTime()); + } + result = result && (hasFlushed() == other.hasFlushed()); + if (hasFlushed()) { + result = result && (getFlushed() + == other.getFlushed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLastFlushTime()) { + hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLastFlushTime()); + } + if (hasFlushed()) { + hash = (37 * hash) + FLUSHED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getFlushed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + 
public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lastFlushTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + flushed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lastFlushTime_ = lastFlushTime_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.flushed_ = flushed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this; + if (other.hasLastFlushTime()) { + setLastFlushTime(other.getLastFlushTime()); + } + if (other.hasFlushed()) { + setFlushed(other.getFlushed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLastFlushTime()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushTime_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + flushed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lastFlushTime = 1; + private long lastFlushTime_ ; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + public Builder setLastFlushTime(long value) { + bitField0_ |= 0x00000001; + lastFlushTime_ = value; + onChanged(); + return this; + } + public Builder clearLastFlushTime() { + bitField0_ = (bitField0_ & ~0x00000001); + lastFlushTime_ = 0L; + onChanged(); + return this; + } + + // optional bool flushed = 2; + private boolean flushed_ ; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + 
return flushed_; + } + public Builder setFlushed(boolean value) { + bitField0_ |= 0x00000002; + flushed_ = value; + onChanged(); + return this; + } + public Builder clearFlushed() { + bitField0_ = (bitField0_ & ~0x00000002); + flushed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FlushRegionResponse) + } + + static { + defaultInstance = new FlushRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FlushRegionResponse) + } + + public interface SplitRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bytes splitPoint = 2; + boolean hasSplitPoint(); + com.google.protobuf.ByteString getSplitPoint(); + } + public static final class SplitRegionRequest extends + com.google.protobuf.GeneratedMessage + implements SplitRegionRequestOrBuilder { + // Use SplitRegionRequest.newBuilder() to construct. 
+ private SplitRegionRequest(Builder builder) { + super(builder); + } + private SplitRegionRequest(boolean noInit) {} + + private static final SplitRegionRequest defaultInstance; + public static SplitRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bytes splitPoint = 2; + public static final int SPLITPOINT_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString splitPoint_; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if 
(isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, splitPoint_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, splitPoint_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasSplitPoint() == other.hasSplitPoint()); + if 
(hasSplitPoint()) { + result = result && getSplitPoint() + .equals(other.getSplitPoint()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasSplitPoint()) { + hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER; + hash = (53 * hash) + getSplitPoint().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.splitPoint_ = splitPoint_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + 
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasSplitPoint()) { + setSplitPoint(other.getSplitPoint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + splitPoint_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + 
onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bytes splitPoint = 2; + private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + public Builder setSplitPoint(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + splitPoint_ = value; + onChanged(); + return this; + } + public Builder clearSplitPoint() { + bitField0_ = (bitField0_ & ~0x00000002); + splitPoint_ = getDefaultInstance().getSplitPoint(); + onChanged(); + return this; + } + + // 
@@protoc_insertion_point(builder_scope:SplitRegionRequest) + } + + static { + defaultInstance = new SplitRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionRequest) + } + + public interface SplitRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class SplitRegionResponse extends + com.google.protobuf.GeneratedMessage + implements SplitRegionResponseOrBuilder { + // Use SplitRegionResponse.newBuilder() to construct. + private SplitRegionResponse(Builder builder) { + super(builder); + } + private SplitRegionResponse(boolean noInit) {} + + private static final SplitRegionResponse defaultInstance; + public static SplitRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = 
size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public 
Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse 
getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:SplitRegionResponse) + } + + static { + defaultInstance = new SplitRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionResponse) + } + + public interface CompactRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bool major = 2; + boolean hasMajor(); + boolean getMajor(); + } + public static final class CompactRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CompactRegionRequestOrBuilder { + // Use CompactRegionRequest.newBuilder() to construct. 
+ private CompactRegionRequest(Builder builder) { + super(builder); + } + private CompactRegionRequest(boolean noInit) {} + + private static final CompactRegionRequest defaultInstance; + public static CompactRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bool major = 2; + public static final int MAJOR_FIELD_NUMBER = 2; + private boolean major_; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + major_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + 
return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, major_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, major_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMajor() == other.hasMajor()); + if (hasMajor()) { + result = result && (getMajor() + == other.getMajor()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMajor()) { + hash = (37 * hash) + MAJOR_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMajor()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + 
return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + major_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.major_ = major_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMajor()) { + setMajor(other.getMajor()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + major_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } 
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bool major = 2; + private boolean major_ ; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + public Builder setMajor(boolean value) { + bitField0_ |= 0x00000002; + major_ = value; + onChanged(); + return this; + } + public Builder clearMajor() { + bitField0_ = (bitField0_ & ~0x00000002); + major_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CompactRegionRequest) + } + + static { + defaultInstance = new CompactRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionRequest) + } + + public interface CompactRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class 
CompactRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CompactRegionResponseOrBuilder { + // Use CompactRegionResponse.newBuilder() to construct. + private CompactRegionResponse(Builder builder) { + super(builder); + } + private CompactRegionResponse(boolean noInit) {} + + private static final CompactRegionResponse defaultInstance; + public static CompactRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if 
(!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + 
@java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse build() { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = 
input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CompactRegionResponse) + } + + static { + defaultInstance = new CompactRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionResponse) + } + + public interface UUIDOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 leastSigBits = 1; + boolean hasLeastSigBits(); + long getLeastSigBits(); + + // required uint64 mostSigBits = 2; + boolean hasMostSigBits(); + long getMostSigBits(); + } + public static final class UUID extends + com.google.protobuf.GeneratedMessage + implements UUIDOrBuilder { + // Use UUID.newBuilder() to construct. + private UUID(Builder builder) { + super(builder); + } + private UUID(boolean noInit) {} + + private static final UUID defaultInstance; + public static UUID getDefaultInstance() { + return defaultInstance; + } + + public UUID getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 leastSigBits = 1; + public static final int LEASTSIGBITS_FIELD_NUMBER = 1; + private long leastSigBits_; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + + // 
required uint64 mostSigBits = 2; + public static final int MOSTSIGBITS_FIELD_NUMBER = 2; + private long mostSigBits_; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + + private void initFields() { + leastSigBits_ = 0L; + mostSigBits_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLeastSigBits()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMostSigBits()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, mostSigBits_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, mostSigBits_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; + + boolean result = true; + result = result && (hasLeastSigBits() == other.hasLeastSigBits()); + if (hasLeastSigBits()) { + result = result && (getLeastSigBits() + == other.getLeastSigBits()); + } + result = result && (hasMostSigBits() == other.hasMostSigBits()); + if (hasMostSigBits()) { + result = result && (getMostSigBits() + == other.getMostSigBits()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLeastSigBits()) { + hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLeastSigBits()); + } + if (hasMostSigBits()) { + hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getMostSigBits()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public 
static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + leastSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + mostSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); + } + + 
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.leastSigBits_ = leastSigBits_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mostSigBits_ = mostSigBits_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; + if 
(other.hasLeastSigBits()) { + setLeastSigBits(other.getLeastSigBits()); + } + if (other.hasMostSigBits()) { + setMostSigBits(other.getMostSigBits()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLeastSigBits()) { + + return false; + } + if (!hasMostSigBits()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + leastSigBits_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + mostSigBits_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 leastSigBits = 1; + private long leastSigBits_ ; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + public Builder setLeastSigBits(long value) { + bitField0_ |= 0x00000001; + leastSigBits_ = value; + onChanged(); + return this; + } + public Builder clearLeastSigBits() { + bitField0_ = (bitField0_ & ~0x00000001); + leastSigBits_ = 0L; + onChanged(); + return this; + } + + // required uint64 mostSigBits = 2; + private long mostSigBits_ ; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + public Builder 
setMostSigBits(long value) { + bitField0_ |= 0x00000002; + mostSigBits_ = value; + onChanged(); + return this; + } + public Builder clearMostSigBits() { + bitField0_ = (bitField0_ & ~0x00000002); + mostSigBits_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UUID) + } + + static { + defaultInstance = new UUID(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UUID) + } + + public interface WALEntryOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .WALEntry.WALKey walKey = 1; + boolean hasWalKey(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder(); + + // required .WALEntry.WALEdit edit = 2; + boolean hasEdit(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); + } + public static final class WALEntry extends + com.google.protobuf.GeneratedMessage + implements WALEntryOrBuilder { + // Use WALEntry.newBuilder() to construct. 
+ private WALEntry(Builder builder) { + super(builder); + } + private WALEntry(boolean noInit) {} + + private static final WALEntry defaultInstance; + public static WALEntry getDefaultInstance() { + return defaultInstance; + } + + public WALEntry getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + public interface WALKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes encodedRegionName = 1; + boolean hasEncodedRegionName(); + com.google.protobuf.ByteString getEncodedRegionName(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required uint64 logSequenceNumber = 3; + boolean hasLogSequenceNumber(); + long getLogSequenceNumber(); + + // required uint64 writeTime = 4; + boolean hasWriteTime(); + long getWriteTime(); + + // optional .UUID clusterId = 5; + boolean hasClusterId(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); + } + public static final class WALKey extends + com.google.protobuf.GeneratedMessage + implements WALKeyOrBuilder { + // Use WALKey.newBuilder() to construct. 
+ private WALKey(Builder builder) { + super(builder); + } + private WALKey(boolean noInit) {} + + private static final WALKey defaultInstance; + public static WALKey getDefaultInstance() { + return defaultInstance; + } + + public WALKey getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + private int bitField0_; + // required bytes encodedRegionName = 1; + public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString encodedRegionName_; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required uint64 logSequenceNumber = 3; + public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; + private long logSequenceNumber_; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + + // required uint64 writeTime = 4; + public static final int WRITETIME_FIELD_NUMBER = 4; + private long writeTime_; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return 
writeTime_; + } + + // optional .UUID clusterId = 5; + public static final int CLUSTERID_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { + return clusterId_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + return clusterId_; + } + + private void initFields() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + writeTime_ = 0L; + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLogSequenceNumber()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasWriteTime()) { + memoizedIsInitialized = 0; + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, writeTime_); + } + if 
(((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, clusterId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, clusterId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; + + boolean result = true; + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasTableName() == 
other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); + if (hasLogSequenceNumber()) { + result = result && (getLogSequenceNumber() + == other.getLogSequenceNumber()); + } + result = result && (hasWriteTime() == other.hasWriteTime()); + if (hasWriteTime()) { + result = result && (getWriteTime() + == other.getWriteTime()); + } + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasLogSequenceNumber()) { + hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLogSequenceNumber()); + } + if (hasWriteTime()) { + hash = (37 * hash) + WRITETIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getWriteTime()); + } + if (hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getClusterIdFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + logSequenceNumber_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + writeTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + 
} + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.logSequenceNumber_ = logSequenceNumber_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.writeTime_ = writeTime_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (clusterIdBuilder_ == null) { + result.clusterId_ = clusterId_; + } else { + result.clusterId_ = clusterIdBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasLogSequenceNumber()) { + setLogSequenceNumber(other.getLogSequenceNumber()); + } + if (other.hasWriteTime()) { 
+ setWriteTime(other.getWriteTime()); + } + if (other.hasClusterId()) { + mergeClusterId(other.getClusterId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEncodedRegionName()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + if (!hasLogSequenceNumber()) { + + return false; + } + if (!hasWriteTime()) { + + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + encodedRegionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + writeTime_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); + if (hasClusterId()) { + subBuilder.mergeFrom(getClusterId()); + } + input.readMessage(subBuilder, extensionRegistry); + setClusterId(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes encodedRegionName = 1; + 
private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + encodedRegionName_ = value; + onChanged(); + return this; + } + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required uint64 logSequenceNumber = 3; + private long logSequenceNumber_ ; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + public Builder setLogSequenceNumber(long value) { + bitField0_ |= 0x00000004; + logSequenceNumber_ = value; + onChanged(); + return this; + } + public Builder clearLogSequenceNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + logSequenceNumber_ = 0L; + onChanged(); + return this; + } + + // required uint64 writeTime = 4; + private long 
writeTime_ ; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + public Builder setWriteTime(long value) { + bitField0_ |= 0x00000008; + writeTime_ = value; + onChanged(); + return this; + } + public Builder clearWriteTime() { + bitField0_ = (bitField0_ & ~0x00000008); + writeTime_ = 0L; + onChanged(); + return this; + } + + // optional .UUID clusterId = 5; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { + if (clusterIdBuilder_ == null) { + return clusterId_; + } else { + return clusterIdBuilder_.getMessage(); + } + } + public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterId_ = value; + onChanged(); + } else { + clusterIdBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setClusterId( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { + if (clusterIdBuilder_ == null) { + clusterId_ = builderForValue.build(); + onChanged(); + } else { + clusterIdBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if 
(((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) { + clusterId_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + } else { + clusterId_ = value; + } + onChanged(); + } else { + clusterIdBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearClusterId() { + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); + onChanged(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getClusterIdFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + if (clusterIdBuilder_ != null) { + return clusterIdBuilder_.getMessageOrBuilder(); + } else { + return clusterId_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> + getClusterIdFieldBuilder() { + if (clusterIdBuilder_ == null) { + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>( + clusterId_, + getParentForChildren(), + isClean()); + clusterId_ = null; + } + return clusterIdBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) + } + + static { + defaultInstance = new WALKey(true); + defaultInstance.initFields(); + } 
+ + // @@protoc_insertion_point(class_scope:WALEntry.WALKey) + } + + public interface WALEditOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes keyValue = 1; + java.util.List getKeyValueList(); + int getKeyValueCount(); + com.google.protobuf.ByteString getKeyValue(int index); + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + java.util.List + getFamilyScopeList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); + int getFamilyScopeCount(); + java.util.List + getFamilyScopeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index); + } + public static final class WALEdit extends + com.google.protobuf.GeneratedMessage + implements WALEditOrBuilder { + // Use WALEdit.newBuilder() to construct. + private WALEdit(Builder builder) { + super(builder); + } + private WALEdit(boolean noInit) {} + + private static final WALEdit defaultInstance; + public static WALEdit getDefaultInstance() { + return defaultInstance; + } + + public WALEdit getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + public enum ScopeType + implements com.google.protobuf.ProtocolMessageEnum { + REPLICATION_SCOPE_LOCAL(0, 0), + REPLICATION_SCOPE_GLOBAL(1, 1), + ; + + public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; + public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; + + + public final int getNumber() { return value; } + + public static ScopeType valueOf(int 
value) { + switch (value) { + case 0: return REPLICATION_SCOPE_LOCAL; + case 1: return REPLICATION_SCOPE_GLOBAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ScopeType findValueByNumber(int number) { + return ScopeType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); + } + + private static final ScopeType[] VALUES = { + REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, + }; + + public static ScopeType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ScopeType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) + } + + public interface FamilyScopeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + boolean hasScopeType(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); + } + public static final class FamilyScope 
extends + com.google.protobuf.GeneratedMessage + implements FamilyScopeOrBuilder { + // Use FamilyScope.newBuilder() to construct. + private FamilyScope(Builder builder) { + super(builder); + } + private FamilyScope(boolean noInit) {} + + private static final FamilyScope defaultInstance; + public static FamilyScope getDefaultInstance() { + return defaultInstance; + } + + public FamilyScope getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + public static final int SCOPETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScopeType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, scopeType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, scopeType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result 
= result && (hasScopeType() == other.hasScopeType()); + if (hasScopeType()) { + result = result && + (getScopeType() == other.getScopeType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasScopeType()) { + hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getScopeType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope 
prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.scopeType_ = scopeType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasScopeType()) { + setScopeType(other.getScopeType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasScopeType()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + scopeType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private 
com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + scopeType_ = value; + onChanged(); + return this; + } + public Builder clearScopeType() { + bitField0_ = (bitField0_ & ~0x00000002); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) + } + + static { + defaultInstance = new FamilyScope(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) + } + + // repeated bytes keyValue = 1; + public static final int KEYVALUE_FIELD_NUMBER = 1; + private java.util.List keyValue_; + public 
java.util.List + getKeyValueList() { + return keyValue_; + } + public int getKeyValueCount() { + return keyValue_.size(); + } + public com.google.protobuf.ByteString getKeyValue(int index) { + return keyValue_.get(index); + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + public static final int FAMILYSCOPE_FIELD_NUMBER = 2; + private java.util.List familyScope_; + public java.util.List getFamilyScopeList() { + return familyScope_; + } + public java.util.List + getFamilyScopeOrBuilderList() { + return familyScope_; + } + public int getFamilyScopeCount() { + return familyScope_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + return familyScope_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index) { + return familyScope_.get(index); + } + + private void initFields() { + keyValue_ = java.util.Collections.emptyList();; + familyScope_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < keyValue_.size(); i++) { + output.writeBytes(1, keyValue_.get(i)); + } + for (int i = 0; i < familyScope_.size(); i++) { + output.writeMessage(2, familyScope_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { 
+ int dataSize = 0; + for (int i = 0; i < keyValue_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValue_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueList().size(); + } + for (int i = 0; i < familyScope_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyScope_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; + + boolean result = true; + result = result && getKeyValueList() + .equals(other.getKeyValueList()); + result = result && getFamilyScopeList() + .equals(other.getFamilyScopeList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getKeyValueCount() > 0) { + hash = (37 * hash) + KEYVALUE_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueList().hashCode(); + } + if (getFamilyScopeCount() > 0) { + hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; + hash = (53 * hash) + getFamilyScopeList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyScopeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + keyValue_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + 
public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValue_ = java.util.Collections.unmodifiableList(keyValue_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.keyValue_ = keyValue_; + if (familyScopeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = java.util.Collections.unmodifiableList(familyScope_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyScope_ = familyScope_; + } else { + result.familyScope_ = familyScopeBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; + if (!other.keyValue_.isEmpty()) { + if (keyValue_.isEmpty()) { + keyValue_ = other.keyValue_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueIsMutable(); + keyValue_.addAll(other.keyValue_); + } + onChanged(); + } + if (familyScopeBuilder_ == null) { + if (!other.familyScope_.isEmpty()) { + if (familyScope_.isEmpty()) { + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyScopeIsMutable(); + familyScope_.addAll(other.familyScope_); + } + onChanged(); + } + } else { + if (!other.familyScope_.isEmpty()) { + if 
(familyScopeBuilder_.isEmpty()) { + familyScopeBuilder_.dispose(); + familyScopeBuilder_ = null; + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + familyScopeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyScopeFieldBuilder() : null; + } else { + familyScopeBuilder_.addAllMessages(other.familyScope_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureKeyValueIsMutable(); + keyValue_.add(input.readBytes()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyScope(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes keyValue = 1; + private java.util.List keyValue_ = java.util.Collections.emptyList();; + private void ensureKeyValueIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + keyValue_ = new java.util.ArrayList(keyValue_); 
+ bitField0_ |= 0x00000001; + } + } + public java.util.List + getKeyValueList() { + return java.util.Collections.unmodifiableList(keyValue_); + } + public int getKeyValueCount() { + return keyValue_.size(); + } + public com.google.protobuf.ByteString getKeyValue(int index) { + return keyValue_.get(index); + } + public Builder setKeyValue( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.set(index, value); + onChanged(); + return this; + } + public Builder addKeyValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.add(value); + onChanged(); + return this; + } + public Builder addAllKeyValue( + java.lang.Iterable values) { + ensureKeyValueIsMutable(); + super.addAll(values, keyValue_); + onChanged(); + return this; + } + public Builder clearKeyValue() { + keyValue_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + private java.util.List familyScope_ = + java.util.Collections.emptyList(); + private void ensureFamilyScopeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = new java.util.ArrayList(familyScope_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; + + public java.util.List getFamilyScopeList() { + if (familyScopeBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyScope_); + } else { + return familyScopeBuilder_.getMessageList(); + } + } + public 
int getFamilyScopeCount() { + if (familyScopeBuilder_ == null) { + return familyScope_.size(); + } else { + return familyScopeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + return familyScope_.get(index); + } else { + return familyScopeBuilder_.getMessage(index); + } + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.set(index, value); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.set(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(index, value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, value); + } + return this; + } + public 
Builder addFamilyScope( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyScope( + java.lang.Iterable values) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + super.addAll(values, familyScope_); + onChanged(); + } else { + familyScopeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyScope() { + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + public Builder removeFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.remove(index); + onChanged(); + } else { + familyScopeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index) { + if (familyScopeBuilder_ == null) { + return familyScope_.get(index); } else { + return familyScopeBuilder_.getMessageOrBuilder(index); + } + } + public 
java.util.List + getFamilyScopeOrBuilderList() { + if (familyScopeBuilder_ != null) { + return familyScopeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyScope_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { + return getFamilyScopeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public java.util.List + getFamilyScopeBuilderList() { + return getFamilyScopeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + getFamilyScopeFieldBuilder() { + if (familyScopeBuilder_ == null) { + familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( + familyScope_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyScope_ = null; + } + return familyScopeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) + } + + static { + defaultInstance = new WALEdit(true); + defaultInstance.initFields(); + } + 
+ // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) + } + + private int bitField0_; + // required .WALEntry.WALKey walKey = 1; + public static final int WALKEY_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey walKey_; + public boolean hasWalKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey() { + return walKey_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + return walKey_; + } + + // required .WALEntry.WALEdit edit = 2; + public static final int EDIT_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; + public boolean hasEdit() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { + return edit_; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + return edit_; + } + + private void initFields() { + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasWalKey()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasEdit()) { + memoizedIsInitialized = 0; + return false; + } + if (!getWalKey().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getEdit().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, walKey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, edit_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, walKey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, edit_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj; + + boolean result = true; + result = result && (hasWalKey() == other.hasWalKey()); + if (hasWalKey()) { + result = result && getWalKey() + .equals(other.getWalKey()); + } + result = result && (hasEdit() == other.hasEdit()); + if (hasEdit()) { + result = result && getEdit() + .equals(other.getEdit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasWalKey()) { + hash = (37 * hash) + WALKEY_FIELD_NUMBER; + hash = (53 * hash) 
+ getWalKey().hashCode(); + } + if (hasEdit()) { + hash = (37 * hash) + EDIT_FIELD_NUMBER; + hash = (53 * hash) + getEdit().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder 
builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getWalKeyFieldBuilder(); + getEditFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (walKeyBuilder_ == null) { + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + } else { + walKeyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (walKeyBuilder_ == null) { + result.walKey_ = walKey_; + } else { + result.walKey_ = walKeyBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (editBuilder_ == null) { + result.edit_ = edit_; + } else { + result.edit_ = editBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()) return this; + if (other.hasWalKey()) { + mergeWalKey(other.getWalKey()); + } + if (other.hasEdit()) { + mergeEdit(other.getEdit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasWalKey()) { + + return false; + } + if (!hasEdit()) { + + return 
false; + } + if (!getWalKey().isInitialized()) { + + return false; + } + if (!getEdit().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); + if (hasWalKey()) { + subBuilder.mergeFrom(getWalKey()); + } + input.readMessage(subBuilder, extensionRegistry); + setWalKey(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); + if (hasEdit()) { + subBuilder.mergeFrom(getEdit()); + } + input.readMessage(subBuilder, extensionRegistry); + setEdit(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .WALEntry.WALKey walKey = 1; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> walKeyBuilder_; + public boolean hasWalKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getWalKey() { + if (walKeyBuilder_ == null) { + return walKey_; + } else { + return walKeyBuilder_.getMessage(); + } + } + public Builder setWalKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + if (walKeyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + walKey_ = value; + onChanged(); + } else { + walKeyBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setWalKey( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { + if (walKeyBuilder_ == null) { + walKey_ = builderForValue.build(); + onChanged(); + } else { + walKeyBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeWalKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + if (walKeyBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + walKey_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) { + walKey_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(walKey_).mergeFrom(value).buildPartial(); + } else { + walKey_ = value; + } + onChanged(); + } else { + walKeyBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearWalKey() { + if (walKeyBuilder_ == null) { + walKey_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + onChanged(); + } else { + walKeyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getWalKeyBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getWalKeyFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + if (walKeyBuilder_ != null) { + return walKeyBuilder_.getMessageOrBuilder(); + } else { + return walKey_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> + getWalKeyFieldBuilder() { + if (walKeyBuilder_ == null) { + walKeyBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>( + walKey_, + getParentForChildren(), + isClean()); + walKey_ = null; + } + return walKeyBuilder_; + } + + // required .WALEntry.WALEdit edit = 2; + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; + public boolean hasEdit() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { + if (editBuilder_ == null) { + return edit_; + } else { + return editBuilder_.getMessage(); + } + } + public Builder 
setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + edit_ = value; + onChanged(); + } else { + editBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setEdit( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) { + if (editBuilder_ == null) { + edit_ = builderForValue.build(); + onChanged(); + } else { + editBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) { + edit_ = + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); + } else { + edit_ = value; + } + onChanged(); + } else { + editBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearEdit() { + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + onChanged(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getEditFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + if (editBuilder_ != null) { + return editBuilder_.getMessageOrBuilder(); + } else { + return edit_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> + getEditFieldBuilder() { + if (editBuilder_ == null) { + editBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>( + edit_, + getParentForChildren(), + isClean()); + edit_ = null; + } + return editBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry) + } + + static { + defaultInstance = new WALEntry(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry) + } + + public interface ReplicateWALEntryRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .WALEntry walEntry = 1; + java.util.List + getWalEntryList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index); + int getWalEntryCount(); + java.util.List + getWalEntryOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index); + } + public static final class ReplicateWALEntryRequest extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryRequestOrBuilder { + // Use ReplicateWALEntryRequest.newBuilder() to construct. 
+ private ReplicateWALEntryRequest(Builder builder) { + super(builder); + } + private ReplicateWALEntryRequest(boolean noInit) {} + + private static final ReplicateWALEntryRequest defaultInstance; + public static ReplicateWALEntryRequest getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // repeated .WALEntry walEntry = 1; + public static final int WALENTRY_FIELD_NUMBER = 1; + private java.util.List walEntry_; + public java.util.List getWalEntryList() { + return walEntry_; + } + public java.util.List + getWalEntryOrBuilderList() { + return walEntry_; + } + public int getWalEntryCount() { + return walEntry_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index) { + return walEntry_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index) { + return walEntry_.get(index); + } + + private void initFields() { + walEntry_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getWalEntryCount(); i++) { + if (!getWalEntry(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + for (int i = 0; i < walEntry_.size(); i++) { + output.writeMessage(1, walEntry_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < walEntry_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, walEntry_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) obj; + + boolean result = true; + result = result && getWalEntryList() + .equals(other.getWalEntryList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getWalEntryCount() > 0) { + hash = (37 * hash) + WALENTRY_FIELD_NUMBER; + hash = (53 * hash) + getWalEntryList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getWalEntryFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (walEntryBuilder_ == null) { + walEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + walEntryBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest(this); + int from_bitField0_ = bitField0_; + if (walEntryBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + walEntry_ = java.util.Collections.unmodifiableList(walEntry_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.walEntry_ = walEntry_; + } else { + result.walEntry_ = walEntryBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; + if (walEntryBuilder_ == null) { + if (!other.walEntry_.isEmpty()) { + if (walEntry_.isEmpty()) { + walEntry_ = other.walEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureWalEntryIsMutable(); + walEntry_.addAll(other.walEntry_); + } + onChanged(); + } + } else { + if (!other.walEntry_.isEmpty()) { + if (walEntryBuilder_.isEmpty()) { + walEntryBuilder_.dispose(); + walEntryBuilder_ = null; + walEntry_ = other.walEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + walEntryBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getWalEntryFieldBuilder() : null; + } else { + walEntryBuilder_.addAllMessages(other.walEntry_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getWalEntryCount(); i++) { + if (!getWalEntry(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addWalEntry(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .WALEntry walEntry = 1; + private java.util.List walEntry_ = + java.util.Collections.emptyList(); + private void ensureWalEntryIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + walEntry_ = new java.util.ArrayList(walEntry_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> walEntryBuilder_; + + public java.util.List getWalEntryList() { + if (walEntryBuilder_ == null) { + 
return java.util.Collections.unmodifiableList(walEntry_); + } else { + return walEntryBuilder_.getMessageList(); + } + } + public int getWalEntryCount() { + if (walEntryBuilder_ == null) { + return walEntry_.size(); + } else { + return walEntryBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry getWalEntry(int index) { + if (walEntryBuilder_ == null) { + return walEntry_.get(index); + } else { + return walEntryBuilder_.getMessage(index); + } + } + public Builder setWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.set(index, value); + onChanged(); + } else { + walEntryBuilder_.setMessage(index, value); + } + return this; + } + public Builder setWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.set(index, builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addWalEntry(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.add(value); + onChanged(); + } else { + walEntryBuilder_.addMessage(value); + } + return this; + } + public Builder addWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.add(index, value); + onChanged(); + } else { + walEntryBuilder_.addMessage(index, value); + } + return this; + } + public Builder addWalEntry( + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.add(builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.add(index, builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllWalEntry( + java.lang.Iterable values) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + super.addAll(values, walEntry_); + onChanged(); + } else { + walEntryBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearWalEntry() { + if (walEntryBuilder_ == null) { + walEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + walEntryBuilder_.clear(); + } + return this; + } + public Builder removeWalEntry(int index) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.remove(index); + onChanged(); + } else { + walEntryBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder getWalEntryBuilder( + int index) { + return getWalEntryFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index) { + if (walEntryBuilder_ == null) { + return walEntry_.get(index); } else { + return walEntryBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getWalEntryOrBuilderList() { + if (walEntryBuilder_ != null) { + return walEntryBuilder_.getMessageOrBuilderList(); + } else { + return 
java.util.Collections.unmodifiableList(walEntry_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addWalEntryBuilder() { + return getWalEntryFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder addWalEntryBuilder( + int index) { + return getWalEntryFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.getDefaultInstance()); + } + public java.util.List + getWalEntryBuilderList() { + return getWalEntryFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder> + getWalEntryFieldBuilder() { + if (walEntryBuilder_ == null) { + walEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntryOrBuilder>( + walEntry_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + walEntry_ = null; + } + return walEntryBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) + } + + static { + defaultInstance = new ReplicateWALEntryRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) + } + + public interface ReplicateWALEntryResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ReplicateWALEntryResponse extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryResponseOrBuilder { + // Use 
ReplicateWALEntryResponse.newBuilder() to construct. + private ReplicateWALEntryResponse(Builder builder) { + super(builder); + } + private ReplicateWALEntryResponse(boolean noInit) {} + + private static final ReplicateWALEntryResponse defaultInstance; + public static ReplicateWALEntryResponse getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return 
newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse build() { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); 
+ while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) + } + + static { + defaultInstance = new ReplicateWALEntryResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) + } + + public interface RollWALWriterRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RollWALWriterRequest extends + com.google.protobuf.GeneratedMessage + implements RollWALWriterRequestOrBuilder { + // Use RollWALWriterRequest.newBuilder() to construct. + private RollWALWriterRequest(Builder builder) { + super(builder); + } + private RollWALWriterRequest(boolean noInit) {} + + private static final RollWALWriterRequest defaultInstance; + public static RollWALWriterRequest getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new 
Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) + } + + static { + defaultInstance = new RollWALWriterRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterRequest) + } + + public interface RollWALWriterResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes regionToFlush = 1; + java.util.List getRegionToFlushList(); + int getRegionToFlushCount(); + com.google.protobuf.ByteString getRegionToFlush(int index); + } + public static final class RollWALWriterResponse extends + com.google.protobuf.GeneratedMessage + implements RollWALWriterResponseOrBuilder { + // Use RollWALWriterResponse.newBuilder() to construct. 
+ private RollWALWriterResponse(Builder builder) { + super(builder); + } + private RollWALWriterResponse(boolean noInit) {} + + private static final RollWALWriterResponse defaultInstance; + public static RollWALWriterResponse getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // repeated bytes regionToFlush = 1; + public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; + private java.util.List regionToFlush_; + public java.util.List + getRegionToFlushList() { + return regionToFlush_; + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + + private void initFields() { + regionToFlush_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionToFlush_.size(); i++) { + output.writeBytes(1, regionToFlush_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for 
(int i = 0; i < regionToFlush_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(regionToFlush_.get(i)); + } + size += dataSize; + size += 1 * getRegionToFlushList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) obj; + + boolean result = true; + result = result && getRegionToFlushList() + .equals(other.getRegionToFlushList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionToFlushCount() > 0) { + hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER; + hash = (53 * hash) + getRegionToFlushList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + 
} + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = 
java.util.Collections.unmodifiableList(regionToFlush_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionToFlush_ = regionToFlush_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; + if (!other.regionToFlush_.isEmpty()) { + if (regionToFlush_.isEmpty()) { + regionToFlush_ = other.regionToFlush_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionToFlushIsMutable(); + regionToFlush_.addAll(other.regionToFlush_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureRegionToFlushIsMutable(); + regionToFlush_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes regionToFlush = 1; + private 
java.util.List regionToFlush_ = java.util.Collections.emptyList();; + private void ensureRegionToFlushIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = new java.util.ArrayList(regionToFlush_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getRegionToFlushList() { + return java.util.Collections.unmodifiableList(regionToFlush_); + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + public Builder setRegionToFlush( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.set(index, value); + onChanged(); + return this; + } + public Builder addRegionToFlush(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.add(value); + onChanged(); + return this; + } + public Builder addAllRegionToFlush( + java.lang.Iterable values) { + ensureRegionToFlushIsMutable(); + super.addAll(values, regionToFlush_); + onChanged(); + return this; + } + public Builder clearRegionToFlush() { + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) + } + + static { + defaultInstance = new RollWALWriterResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterResponse) + } + + public interface StopServerRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string reason = 1; + boolean hasReason(); + String getReason(); + } + public static final class StopServerRequest extends + com.google.protobuf.GeneratedMessage + implements StopServerRequestOrBuilder { + // Use 
StopServerRequest.newBuilder() to construct. + private StopServerRequest(Builder builder) { + super(builder); + } + private StopServerRequest(boolean noInit) {} + + private static final StopServerRequest defaultInstance; + public static StopServerRequest getDefaultInstance() { + return defaultInstance; + } + + public StopServerRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + private int bitField0_; + // required string reason = 1; + public static final int REASON_FIELD_NUMBER = 1; + private java.lang.Object reason_; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + reason_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getReasonBytes() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + reason_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + reason_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasReason()) { + memoizedIsInitialized = 0; + 
return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getReasonBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getReasonBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) obj; + + boolean result = true; + result = result && (hasReason() == other.hasReason()); + if (hasReason()) { + result = result && getReason() + .equals(other.getReason()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasReason()) { + hash = (37 * hash) + REASON_FIELD_NUMBER; + hash = (53 * hash) + getReason().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + reason_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.reason_ = reason_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance()) return this; + if (other.hasReason()) { + setReason(other.getReason()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasReason()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + reason_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // 
required string reason = 1; + private java.lang.Object reason_ = ""; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + reason_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setReason(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + return this; + } + public Builder clearReason() { + bitField0_ = (bitField0_ & ~0x00000001); + reason_ = getDefaultInstance().getReason(); + onChanged(); + return this; + } + void setReason(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:StopServerRequest) + } + + static { + defaultInstance = new StopServerRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerRequest) + } + + public interface StopServerResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class StopServerResponse extends + com.google.protobuf.GeneratedMessage + implements StopServerResponseOrBuilder { + // Use StopServerResponse.newBuilder() to construct. 
+ private StopServerResponse(Builder builder) { + super(builder); + } + private StopServerResponse(boolean noInit) {} + + private static final StopServerResponse defaultInstance; + public static StopServerResponse getDefaultInstance() { + return defaultInstance; + } + + public StopServerResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static 
final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); 
+ onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:StopServerResponse) + } + + static { + defaultInstance = new StopServerResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerResponse) + } + + public interface GetServerInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetServerInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoRequestOrBuilder { + // Use GetServerInfoRequest.newBuilder() to construct. + private GetServerInfoRequest(Builder builder) { + super(builder); + } + private GetServerInfoRequest(boolean noInit) {} + + private static final GetServerInfoRequest defaultInstance; + public static GetServerInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; 
+ size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + byte[] 
data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); 
+ } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) + } + + static { + defaultInstance = new GetServerInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoRequest) + } + + public interface GetServerInfoResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ServerName serverName = 1; + boolean hasServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + } + public static final class GetServerInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoResponseOrBuilder { + // Use GetServerInfoResponse.newBuilder() to construct. 
+ private GetServerInfoResponse(Builder builder) { + super(builder); + } + private GetServerInfoResponse(boolean noInit) {} + + private static final GetServerInfoResponse defaultInstance; + public static GetServerInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName serverName = 1; + public static final int SERVERNAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_; + } + + private void initFields() { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServerName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, serverName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) obj; + + boolean result = true; + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServerName()) { + hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + 
// Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return 
result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServerName()) { + + return false; + } + if (!getServerName().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); 
+ return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServerName()) { + subBuilder.mergeFrom(getServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerName(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName serverName = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + return serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + 
serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) + } + + static { + defaultInstance = new GetServerInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoResponse) + } + + public static abstract class AdminService + implements com.google.protobuf.Service { + protected AdminService() {} + + public interface Interface { + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new AdminService() { + @java.lang.Override + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done) { + impl.getRegionInfo(controller, request, done); + } + + @java.lang.Override + public void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { + impl.getStoreFileList(controller, request, done); + } + + @java.lang.Override + public void getOnlineRegion( + com.google.protobuf.RpcController 
controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.getOnlineRegion(controller, request, done); + } + + @java.lang.Override + public void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.openRegion(controller, request, done); + } + + @java.lang.Override + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.closeRegion(controller, request, done); + } + + @java.lang.Override + public void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.flushRegion(controller, request, done); + } + + @java.lang.Override + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.splitRegion(controller, request, done); + } + + @java.lang.Override + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.compactRegion(controller, request, done); + } + + @java.lang.Override + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { + impl.replicateWALEntry(controller, request, done); + } + + @java.lang.Override + public void rollWALWriter( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { + impl.rollWALWriter(controller, request, done); + } + + @java.lang.Override + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { + impl.getServerInfo(controller, request, done); + } + + @java.lang.Override + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { + impl.stopServer(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request); + case 1: + return impl.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)request); + case 2: + return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request); + case 3: + return impl.openRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request); + case 4: + return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request); + case 5: + return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request); + case 6: + return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request); + case 7: + return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request); + case 8: + return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request); + case 9: + return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request); + case 10: + return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request); + case 11: + return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + 
com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + 
done)); + return; + case 8: + this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 9: + this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 10: + this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 11: + this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance())); + } + + public void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + 
getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance())); + } + + public void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance())); + } + + public void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance())); + } + + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, 
+ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance())); + } + + public void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance())); + } + + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance())); + } + + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance())); + } + + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); + } + + public void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance())); + } + + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(10), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance())); + } + + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) + 
throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) + throws 
com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( + 
getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_GetRegionInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRegionInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileListRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileListRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileListResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileListResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_CloseRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CloseRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CloseRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CloseRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UUID_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UUID_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RollWALWriterRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_RollWALWriterResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\013Admin.proto\032\013hbase.proto\"8\n\024GetRegionI" + + "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\"8\n\025GetRegionInfoResponse\022\037\n\nregionI" + + "nfo\030\001 \002(\0132\013.RegionInfo\"Q\n\027GetStoreFileLi" + + "stRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif" + + "ier\022\024\n\014columnFamily\030\002 \003(\014\"-\n\030GetStoreFil" + + "eListResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetO" + + "nlineRegionRequest\":\n\027GetOnlineRegionRes" + + "ponse\022\037\n\nregionInfo\030\001 \003(\0132\013.RegionInfo\"S" + + "\n\021OpenRegionRequest\022 \n\006region\030\001 \003(\0132\020.Re", + "gionSpecifier\022\034\n\024versionOfOfflineNode\030\002 " 
+ + "\001(\r\"\234\001\n\022OpenRegionResponse\022<\n\014openingSta" + + "te\030\001 \003(\0162&.OpenRegionResponse.RegionOpen" + + "ingState\"H\n\022RegionOpeningState\022\n\n\006OPENED" + + "\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_OPENING" + + "\020\002\"r\n\022CloseRegionRequest\022 \n\006region\030\001 \002(\013" + + "2\020.RegionSpecifier\022\034\n\024versionOfClosingNo" + + "de\030\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004true\"%" + + "\n\023CloseRegionResponse\022\016\n\006closed\030\001 \002(\010\"M\n" + + "\022FlushRegionRequest\022 \n\006region\030\001 \002(\0132\020.Re", + "gionSpecifier\022\025\n\rifOlderThanTs\030\002 \001(\004\"=\n\023" + + "FlushRegionResponse\022\025\n\rlastFlushTime\030\001 \002" + + "(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegionReques" + + "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\022\n\ns" + + "plitPoint\030\002 \001(\014\"\025\n\023SplitRegionResponse\"G" + + "\n\024CompactRegionRequest\022 \n\006region\030\001 \002(\0132\020" + + ".RegionSpecifier\022\r\n\005major\030\002 \001(\010\"\027\n\025Compa" + + "ctRegionResponse\"1\n\004UUID\022\024\n\014leastSigBits" + + "\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004\"\266\003\n\010WALEntry" + + "\022 \n\006walKey\030\001 \002(\0132\020.WALEntry.WALKey\022\037\n\004ed", + "it\030\002 \002(\0132\021.WALEntry.WALEdit\032~\n\006WALKey\022\031\n" + + "\021encodedRegionName\030\001 \002(\014\022\021\n\ttableName\030\002 " + + "\002(\014\022\031\n\021logSequenceNumber\030\003 \002(\004\022\021\n\twriteT" + + "ime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUID\032\346\001\n\007" + + "WALEdit\022\020\n\010keyValue\030\001 \003(\014\0222\n\013familyScope" + + "\030\002 \003(\0132\035.WALEntry.WALEdit.FamilyScope\032M\n" + + "\013FamilyScope\022\016\n\006family\030\001 \002(\014\022.\n\tscopeTyp" + + "e\030\002 \002(\0162\033.WALEntry.WALEdit.ScopeType\"F\n\t" + + 
"ScopeType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034" + + "\n\030REPLICATION_SCOPE_GLOBAL\020\001\"7\n\030Replicat", + "eWALEntryRequest\022\033\n\010walEntry\030\001 \003(\0132\t.WAL" + + "Entry\"\033\n\031ReplicateWALEntryResponse\"\026\n\024Ro" + + "llWALWriterRequest\".\n\025RollWALWriterRespo" + + "nse\022\025\n\rregionToFlush\030\001 \003(\014\"#\n\021StopServer" + + "Request\022\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerRes" + + "ponse\"\026\n\024GetServerInfoRequest\"8\n\025GetServ" + + "erInfoResponse\022\037\n\nserverName\030\001 \002(\0132\013.Ser" + + "verName2\205\006\n\014AdminService\022>\n\rgetRegionInf" + + "o\022\025.GetRegionInfoRequest\032\026.GetRegionInfo" + + "Response\022G\n\020getStoreFileList\022\030.GetStoreF", + "ileListRequest\032\031.GetStoreFileListRespons" + + "e\022D\n\017getOnlineRegion\022\027.GetOnlineRegionRe" + + "quest\032\030.GetOnlineRegionResponse\0225\n\nopenR" + + "egion\022\022.OpenRegionRequest\032\023.OpenRegionRe" + + "sponse\0228\n\013closeRegion\022\023.CloseRegionReque" + + "st\032\024.CloseRegionResponse\0228\n\013flushRegion\022" + + "\023.FlushRegionRequest\032\024.FlushRegionRespon" + + "se\0228\n\013splitRegion\022\023.SplitRegionRequest\032\024" + + ".SplitRegionResponse\022>\n\rcompactRegion\022\025." 
+ + "CompactRegionRequest\032\026.CompactRegionResp", + "onse\022J\n\021replicateWALEntry\022\031.ReplicateWAL" + + "EntryRequest\032\032.ReplicateWALEntryResponse" + + "\022>\n\rrollWALWriter\022\025.RollWALWriterRequest" + + "\032\026.RollWALWriterResponse\022>\n\rgetServerInf" + + "o\022\025.GetServerInfoRequest\032\026.GetServerInfo" + + "Response\0225\n\nstopServer\022\022.StopServerReque" + + "st\032\023.StopServerResponseBA\n*org.apache.ha" + + "doop.hbase.protobuf.generatedB\013AdminProt" + + "osH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_GetRegionInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_GetRegionInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoRequest_descriptor, + new java.lang.String[] { "Region", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); + internal_static_GetRegionInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_GetRegionInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoResponse_descriptor, + new java.lang.String[] { "RegionInfo", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); + internal_static_GetStoreFileListRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + 
internal_static_GetStoreFileListRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileListRequest_descriptor, + new java.lang.String[] { "Region", "ColumnFamily", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListRequest.Builder.class); + internal_static_GetStoreFileListResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetStoreFileListResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileListResponse_descriptor, + new java.lang.String[] { "StoreFile", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileListResponse.Builder.class); + internal_static_GetOnlineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetOnlineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class); + internal_static_GetOnlineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_GetOnlineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionResponse_descriptor, + new java.lang.String[] { "RegionInfo", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); + internal_static_OpenRegionRequest_descriptor = + 
getDescriptor().getMessageTypes().get(6); + internal_static_OpenRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfOfflineNode", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); + internal_static_OpenRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_OpenRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionResponse_descriptor, + new java.lang.String[] { "OpeningState", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); + internal_static_CloseRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_CloseRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); + internal_static_CloseRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_CloseRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionResponse_descriptor, + new java.lang.String[] { "Closed", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); + internal_static_FlushRegionRequest_descriptor = + 
getDescriptor().getMessageTypes().get(10); + internal_static_FlushRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionRequest_descriptor, + new java.lang.String[] { "Region", "IfOlderThanTs", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); + internal_static_FlushRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_FlushRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionResponse_descriptor, + new java.lang.String[] { "LastFlushTime", "Flushed", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); + internal_static_SplitRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_SplitRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionRequest_descriptor, + new java.lang.String[] { "Region", "SplitPoint", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); + internal_static_SplitRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_SplitRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionResponse.Builder.class); + internal_static_CompactRegionRequest_descriptor = + 
getDescriptor().getMessageTypes().get(14); + internal_static_CompactRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionRequest_descriptor, + new java.lang.String[] { "Region", "Major", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest.Builder.class); + internal_static_CompactRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_CompactRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionResponse.Builder.class); + internal_static_UUID_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_UUID_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UUID_descriptor, + new java.lang.String[] { "LeastSigBits", "MostSigBits", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); + internal_static_WALEntry_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_WALEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_descriptor, + new java.lang.String[] { "WalKey", "Edit", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); + internal_static_WALEntry_WALKey_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALKey_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALKey_descriptor, + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); + internal_static_WALEntry_WALEdit_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(1); + internal_static_WALEntry_WALEdit_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_descriptor, + new java.lang.String[] { "KeyValue", "FamilyScope", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); + internal_static_WALEntry_WALEdit_FamilyScope_descriptor = + internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_FamilyScope_descriptor, + new java.lang.String[] { "Family", "ScopeType", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + internal_static_ReplicateWALEntryRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryRequest_descriptor, + new java.lang.String[] { "WalEntry", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); + 
internal_static_ReplicateWALEntryResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); + internal_static_RollWALWriterRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_RollWALWriterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); + internal_static_RollWALWriterResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_RollWALWriterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterResponse_descriptor, + new java.lang.String[] { "RegionToFlush", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); + internal_static_StopServerRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_StopServerRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerRequest_descriptor, + new java.lang.String[] { "Reason", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); + 
internal_static_StopServerResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_StopServerResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); + internal_static_GetServerInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_GetServerInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetServerInfoRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); + internal_static_GetServerInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_GetServerInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetServerInfoResponse_descriptor, + new java.lang.String[] { "ServerName", }, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java new file mode 100644 index 0000000..de820e2 --- /dev/null +++ 
src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -0,0 +1,20836 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Client.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ClientProtos { + private ClientProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated bytes qualifier = 2; + java.util.List getQualifierList(); + int getQualifierCount(); + com.google.protobuf.ByteString getQualifier(int index); + } + public static final class Column extends + com.google.protobuf.GeneratedMessage + implements ColumnOrBuilder { + // Use Column.newBuilder() to construct. + private Column(Builder builder) { + super(builder); + } + private Column(boolean noInit) {} + + private static final Column defaultInstance; + public static Column getDefaultInstance() { + return defaultInstance; + } + + public Column getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated bytes qualifier = 2; + public static final int 
QUALIFIER_FIELD_NUMBER = 2; + private java.util.List qualifier_; + public java.util.List + getQualifierList() { + return qualifier_; + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifier_.size(); i++) { + output.writeBytes(2, qualifier_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + { + int dataSize = 0; + for (int i = 0; i < qualifier_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(qualifier_.get(i)); + } + size += dataSize; + size += 1 * getQualifierList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierList() + .equals(other.getQualifierList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierCount() > 0) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifierList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = java.util.Collections.unmodifiableList(qualifier_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (!other.qualifier_.isEmpty()) { + if (qualifier_.isEmpty()) { + qualifier_ = other.qualifier_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierIsMutable(); + qualifier_.addAll(other.qualifier_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + ensureQualifierIsMutable(); + qualifier_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + 
family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated bytes qualifier = 2; + private java.util.List qualifier_ = java.util.Collections.emptyList();; + private void ensureQualifierIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = new java.util.ArrayList(qualifier_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getQualifierList() { + return java.util.Collections.unmodifiableList(qualifier_); + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + public Builder setQualifier( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.set(index, value); + onChanged(); + return this; + } + public Builder addQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.add(value); + onChanged(); + return this; + } + public Builder addAllQualifier( + java.lang.Iterable values) { + ensureQualifierIsMutable(); + super.addAll(values, qualifier_); + onChanged(); + return this; + } + public Builder clearQualifier() { + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Column) + } + + static { + defaultInstance = new Column(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Column) + } + + public interface GetOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // repeated .Column column = 2; + 
java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 3; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 lockId = 4; + boolean hasLockId(); + long getLockId(); + + // optional .NameBytesPair filter = 5; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder(); + + // optional .TimeRange timeRange = 6; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + + // optional uint32 maxVersions = 7 [default = 1]; + boolean hasMaxVersions(); + int getMaxVersions(); + + // optional bool cacheBlocks = 8 [default = true]; + boolean hasCacheBlocks(); + boolean getCacheBlocks(); + } + public static final class Get extends + com.google.protobuf.GeneratedMessage + implements GetOrBuilder { + // Use Get.newBuilder() to construct. 
+ private Get(Builder builder) { + super(builder); + } + private Get(boolean noInit) {} + + private static final Get defaultInstance; + public static Get getDefaultInstance() { + return defaultInstance; + } + + public Get getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // repeated .Column column = 2; + public static final int COLUMN_FIELD_NUMBER = 2; + private java.util.List column_; + public java.util.List getColumnList() { + return column_; + } + public java.util.List + getColumnOrBuilderList() { + return column_; + } + public int getColumnCount() { + return column_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + return column_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + return column_.get(index); + } + + // repeated .NameBytesPair attribute = 3; + public static final int ATTRIBUTE_FIELD_NUMBER = 3; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional uint64 lockId = 4; + public static final int LOCKID_FIELD_NUMBER = 4; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + // optional .NameBytesPair filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean 
cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + lockId_ = 0L; + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ 
& 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) { + return 
super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasFilter()) { + hash = (37 * hash) + 
FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + attributeBuilder_.clear(); + } + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & 
~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ 
& 0x00000004) == 0x00000004)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if 
(columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumn(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 32: { + bitField0_ |= 0x00000008; + lockId_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + 
setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // repeated .Column column = 2; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return 
columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); + onChanged(); + } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + 
onChanged(); + } else { + columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return 
getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + column_ = null; + } + return columnBuilder_; + } + + // repeated .NameBytesPair attribute = 3; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } 
+ public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + 
ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return 
getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 lockId = 4; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000008; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000008); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional .NameBytesPair filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder 
getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if 
(timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & ~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Get) + } + + static { + defaultInstance = new Get(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Get) + } + + public interface ResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated 
bytes keyValueBytes = 1; + java.util.List getKeyValueBytesList(); + int getKeyValueBytesCount(); + com.google.protobuf.ByteString getKeyValueBytes(int index); + } + public static final class Result extends + com.google.protobuf.GeneratedMessage + implements ResultOrBuilder { + // Use Result.newBuilder() to construct. + private Result(Builder builder) { + super(builder); + } + private Result(boolean noInit) {} + + private static final Result defaultInstance; + public static Result getDefaultInstance() { + return defaultInstance; + } + + public Result getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + } + + // repeated bytes keyValueBytes = 1; + public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; + private java.util.List keyValueBytes_; + public java.util.List + getKeyValueBytesList() { + return keyValueBytes_; + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + + private void initFields() { + keyValueBytes_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < keyValueBytes_.size(); i++) { + output.writeBytes(1, keyValueBytes_.get(i)); + } + 
getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < keyValueBytes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValueBytes_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueBytesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj; + + boolean result = true; + result = result && getKeyValueBytesList() + .equals(other.getKeyValueBytesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getKeyValueBytesCount() > 0) { + hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueBytesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } 
else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() 
{ + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.keyValueBytes_ = keyValueBytes_; + onBuilt(); + return result; + } + + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; + if (!other.keyValueBytes_.isEmpty()) { + if (keyValueBytes_.isEmpty()) { + keyValueBytes_ = other.keyValueBytes_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.addAll(other.keyValueBytes_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes keyValueBytes = 1; + private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; + private void ensureKeyValueBytesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); + bitField0_ |= 
0x00000001; + } + } + public java.util.List + getKeyValueBytesList() { + return java.util.Collections.unmodifiableList(keyValueBytes_); + } + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); + } + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); + } + public Builder setKeyValueBytes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.set(index, value); + onChanged(); + return this; + } + public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(value); + onChanged(); + return this; + } + public Builder addAllKeyValueBytes( + java.lang.Iterable values) { + ensureKeyValueBytesIsMutable(); + super.addAll(values, keyValueBytes_); + onChanged(); + return this; + } + public Builder clearKeyValueBytes() { + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Result) + } + + static { + defaultInstance = new Result(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Result) + } + + public interface GetRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Get get = 2; + boolean hasGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); + + // optional bool closestRowBefore = 3; + boolean hasClosestRowBefore(); 
+ boolean getClosestRowBefore(); + + // optional bool existenceOnly = 4; + boolean hasExistenceOnly(); + boolean getExistenceOnly(); + } + public static final class GetRequest extends + com.google.protobuf.GeneratedMessage + implements GetRequestOrBuilder { + // Use GetRequest.newBuilder() to construct. + private GetRequest(Builder builder) { + super(builder); + } + private GetRequest(boolean noInit) {} + + private static final GetRequest defaultInstance; + public static GetRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Get get = 2; + public static final int GET_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + return get_; + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + return get_; + } + + // optional bool closestRowBefore = 3; + public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; + private boolean closestRowBefore_; + public boolean hasClosestRowBefore() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + + // optional bool existenceOnly = 4; + public static final int EXISTENCEONLY_FIELD_NUMBER = 4; + private boolean existenceOnly_; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + closestRowBefore_ = false; + existenceOnly_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasGet()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getGet().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + 
output.writeBool(4, existenceOnly_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, existenceOnly_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasGet() == other.hasGet()); + if (hasGet()) { + result = result && getGet() + .equals(other.getGet()); + } + result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); + if (hasClosestRowBefore()) { + result = result && (getClosestRowBefore() + == other.getClosestRowBefore()); + } + result 
= result && (hasExistenceOnly() == other.hasExistenceOnly()); + if (hasExistenceOnly()) { + result = result && (getExistenceOnly() + == other.getExistenceOnly()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasGet()) { + hash = (37 * hash) + GET_FIELD_NUMBER; + hash = (53 * hash) + getGet().hashCode(); + } + if (hasClosestRowBefore()) { + hash = (37 * hash) + CLOSESTROWBEFORE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosestRowBefore()); + } + if (hasExistenceOnly()) { + hash = (37 * hash) + EXISTENCEONLY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExistenceOnly()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); 
} + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getGetFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + 
closestRowBefore_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + existenceOnly_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (getBuilder_ == null) { + result.get_ = get_; + } else { + result.get_ = 
getBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.closestRowBefore_ = closestRowBefore_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.existenceOnly_ = existenceOnly_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasGet()) { + mergeGet(other.getGet()); + } + if (other.hasClosestRowBefore()) { + setClosestRowBefore(other.getClosestRowBefore()); + } + if (other.hasExistenceOnly()) { + setExistenceOnly(other.getExistenceOnly()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasGet()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getGet().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; 
+ default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(); + if (hasGet()) { + subBuilder.mergeFrom(getGet()); + } + input.readMessage(subBuilder, extensionRegistry); + setGet(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + closestRowBefore_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + existenceOnly_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder 
setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Get get = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() { + if (getBuilder_ == null) { + return get_; + } else { + return getBuilder_.getMessage(); + } + } + public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + get_ = value; + onChanged(); + } else { + getBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setGet( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) { + if (getBuilder_ == null) { + get_ = builderForValue.build(); + onChanged(); + } else { + getBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 
0x00000002; + return this; + } + public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) { + if (getBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) { + get_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); + } else { + get_ = value; + } + onChanged(); + } else { + getBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearGet() { + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); + onChanged(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getGetFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() { + if (getBuilder_ != null) { + return getBuilder_.getMessageOrBuilder(); + } else { + return get_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> + getGetFieldBuilder() { + if (getBuilder_ == null) { + getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>( + get_, + getParentForChildren(), + isClean()); + get_ = null; + } + return getBuilder_; + } + + // optional bool closestRowBefore = 3; + private boolean closestRowBefore_ ; + public boolean hasClosestRowBefore() 
{ + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + public Builder setClosestRowBefore(boolean value) { + bitField0_ |= 0x00000004; + closestRowBefore_ = value; + onChanged(); + return this; + } + public Builder clearClosestRowBefore() { + bitField0_ = (bitField0_ & ~0x00000004); + closestRowBefore_ = false; + onChanged(); + return this; + } + + // optional bool existenceOnly = 4; + private boolean existenceOnly_ ; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + public Builder setExistenceOnly(boolean value) { + bitField0_ |= 0x00000008; + existenceOnly_ = value; + onChanged(); + return this; + } + public Builder clearExistenceOnly() { + bitField0_ = (bitField0_ & ~0x00000008); + existenceOnly_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetRequest) + } + + static { + defaultInstance = new GetRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRequest) + } + + public interface GetResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool exists = 2; + boolean hasExists(); + boolean getExists(); + } + public static final class GetResponse extends + com.google.protobuf.GeneratedMessage + implements GetResponseOrBuilder { + // Use GetResponse.newBuilder() to construct. 
+ private GetResponse(Builder builder) { + super(builder); + } + private GetResponse(boolean noInit) {} + + private static final GetResponse defaultInstance; + public static GetResponse getDefaultInstance() { + return defaultInstance; + } + + public GetResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool exists = 2; + public static final int EXISTS_FIELD_NUMBER = 2; + private boolean exists_; + public boolean hasExists() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + exists_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + 
getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, exists_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, exists_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasExists() == other.hasExists()); + if (hasExists()) { + result = result && (getExists() + == other.getExists()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + 
getResult().hashCode(); + } + if (hasExists()) { + hash = (37 * hash) + EXISTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExists()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + exists_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse 
buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.exists_ = exists_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasExists()) { + setExists(other.getExists()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + exists_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); 
+ } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool exists = 2; + private boolean exists_ ; + public boolean hasExists() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + public Builder setExists(boolean value) { + bitField0_ |= 0x00000002; + exists_ = value; + onChanged(); + return this; + } + public Builder clearExists() { + bitField0_ = (bitField0_ & ~0x00000002); + exists_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetResponse) + } + + static { + defaultInstance = new GetResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetResponse) + } + + public interface ConditionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required bytes family = 2; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 3; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // required .Condition.CompareType compareType = 4; + boolean hasCompareType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType(); + + // required .NameBytesPair comparator = 5; + boolean hasComparator(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder(); + } + public static final class Condition extends + com.google.protobuf.GeneratedMessage + implements ConditionOrBuilder { + 
// Use Condition.newBuilder() to construct. + private Condition(Builder builder) { + super(builder); + } + private Condition(boolean noInit) {} + + private static final Condition defaultInstance; + public static Condition getDefaultInstance() { + return defaultInstance; + } + + public Condition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + } + + public enum CompareType + implements com.google.protobuf.ProtocolMessageEnum { + LESS(0, 0), + LESS_OR_EQUAL(1, 1), + EQUAL(2, 2), + NOT_EQUAL(3, 3), + GREATER_OR_EQUAL(4, 4), + GREATER(5, 5), + NO_OP(6, 6), + ; + + public static final int LESS_VALUE = 0; + public static final int LESS_OR_EQUAL_VALUE = 1; + public static final int EQUAL_VALUE = 2; + public static final int NOT_EQUAL_VALUE = 3; + public static final int GREATER_OR_EQUAL_VALUE = 4; + public static final int GREATER_VALUE = 5; + public static final int NO_OP_VALUE = 6; + + + public final int getNumber() { return value; } + + public static CompareType valueOf(int value) { + switch (value) { + case 0: return LESS; + case 1: return LESS_OR_EQUAL; + case 2: return EQUAL; + case 3: return NOT_EQUAL; + case 4: return GREATER_OR_EQUAL; + case 5: return GREATER; + case 6: return NO_OP; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CompareType findValueByNumber(int number) { + return CompareType.valueOf(number); + 
} + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor().getEnumTypes().get(0); + } + + private static final CompareType[] VALUES = { + LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, + }; + + public static CompareType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private CompareType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Condition.CompareType) + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 3; + public static final int QUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString 
getQualifier() { + return qualifier_; + } + + // required .Condition.CompareType compareType = 4; + public static final int COMPARETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType compareType_; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType() { + return compareType_; + } + + // required .NameBytesPair comparator = 5; + public static final int COMPARATOR_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair comparator_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator() { + return comparator_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder() { + return comparator_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCompareType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasComparator()) { + memoizedIsInitialized = 0; + return false; + } + if 
(!getComparator().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, comparator_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, comparator_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) 
{ + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasCompareType() == other.hasCompareType()); + if (hasCompareType()) { + result = result && + (getCompareType() == other.getCompareType()); + } + result = result && (hasComparator() == other.hasComparator()); + if (hasComparator()) { + result = result && getComparator() + .equals(other.getComparator()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasCompareType()) { + hash = (37 * hash) + COMPARETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompareType()); + } + if (hasComparator()) { + hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + getComparator().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return 
hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparatorFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; + bitField0_ = (bitField0_ & ~0x00000008); + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.compareType_ = compareType_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (comparatorBuilder_ == null) { + result.comparator_ = comparator_; + } else { + result.comparator_ = comparatorBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasCompareType()) { + setCompareType(other.getCompareType()); + } + if (other.hasComparator()) { + mergeComparator(other.getComparator()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + if (!hasCompareType()) { + + return false; + } + if (!hasComparator()) { + + return false; + } + if (!getComparator().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType value = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + compareType_ = value; + } + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasComparator()) { + subBuilder.mergeFrom(getComparator()); + } + input.readMessage(subBuilder, extensionRegistry); + setComparator(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required bytes family = 2; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000002); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 3; + private 
com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // required .Condition.CompareType compareType = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType getCompareType() { + return compareType_; + } + public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compareType_ = value; + onChanged(); + return this; + } + public Builder clearCompareType() { + bitField0_ = (bitField0_ & ~0x00000008); + compareType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.CompareType.LESS; + onChanged(); + return this; + } + + // required .NameBytesPair comparator = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> comparatorBuilder_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getComparator() { + if (comparatorBuilder_ == null) { + return comparator_; + } else { + return comparatorBuilder_.getMessage(); + } + } + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (comparatorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparator_ = value; + onChanged(); + } else { + comparatorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setComparator( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (comparatorBuilder_ == null) { + comparator_ = builderForValue.build(); + onChanged(); + } else { + comparatorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (comparatorBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + comparator_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + comparator_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(comparator_).mergeFrom(value).buildPartial(); + } else { + comparator_ = value; + } + onChanged(); + } else { + comparatorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearComparator() { + if (comparatorBuilder_ == null) { + comparator_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + 
comparatorBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getComparatorBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getComparatorFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getComparatorOrBuilder() { + if (comparatorBuilder_ != null) { + return comparatorBuilder_.getMessageOrBuilder(); + } else { + return comparator_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getComparatorFieldBuilder() { + if (comparatorBuilder_ == null) { + comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + comparator_, + getParentForChildren(), + isClean()); + comparator_ = null; + } + return comparatorBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Condition) + } + + static { + defaultInstance = new Condition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Condition) + } + + public interface MutateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required .Mutate.MutateType mutateType = 2; + boolean hasMutateType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType(); + + // repeated .Mutate.ColumnValue columnValue = 3; + java.util.List + getColumnValueList(); + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index); + int getColumnValueCount(); + java.util.List + getColumnValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 4; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 timestamp = 5; + boolean hasTimestamp(); + long getTimestamp(); + + // optional uint64 lockId = 6; + boolean hasLockId(); + long getLockId(); + + // optional bool writeToWAL = 7 [default = true]; + boolean hasWriteToWAL(); + boolean getWriteToWAL(); + + // optional .TimeRange timeRange = 10; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + } + public static final class Mutate extends + com.google.protobuf.GeneratedMessage + implements MutateOrBuilder { + // Use Mutate.newBuilder() to construct. 
+ private Mutate(Builder builder) { + super(builder); + } + private Mutate(boolean noInit) {} + + private static final Mutate defaultInstance; + public static Mutate getDefaultInstance() { + return defaultInstance; + } + + public Mutate getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + public enum MutateType + implements com.google.protobuf.ProtocolMessageEnum { + APPEND(0, 0), + INCREMENT(1, 1), + PUT(2, 2), + DELETE(3, 3), + ; + + public static final int APPEND_VALUE = 0; + public static final int INCREMENT_VALUE = 1; + public static final int PUT_VALUE = 2; + public static final int DELETE_VALUE = 3; + + + public final int getNumber() { return value; } + + public static MutateType valueOf(int value) { + switch (value) { + case 0: return APPEND; + case 1: return INCREMENT; + case 2: return PUT; + case 3: return DELETE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MutateType findValueByNumber(int number) { + return MutateType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); + } + + private static final MutateType[] VALUES = { + APPEND, INCREMENT, PUT, DELETE, + }; + + public static MutateType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private MutateType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.MutateType) + } + + public enum DeleteType + implements com.google.protobuf.ProtocolMessageEnum { + DELETE_ONE_VERSION(0, 0), + DELETE_MULTIPLE_VERSIONS(1, 1), + DELETE_FAMILY(2, 2), + ; + + public static final int DELETE_ONE_VERSION_VALUE = 0; + public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; + public static final int DELETE_FAMILY_VALUE = 2; + + + public final int getNumber() { return value; } + + public static DeleteType valueOf(int value) { + switch (value) { + case 0: return DELETE_ONE_VERSION; + case 1: return DELETE_MULTIPLE_VERSIONS; + case 2: return DELETE_FAMILY; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DeleteType findValueByNumber(int number) { + return DeleteType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { 
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor().getEnumTypes().get(1); + } + + private static final DeleteType[] VALUES = { + DELETE_ONE_VERSION, DELETE_MULTIPLE_VERSIONS, DELETE_FAMILY, + }; + + public static DeleteType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private DeleteType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.DeleteType) + } + + public interface ColumnValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + java.util.List + getQualifierValueList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); + int getQualifierValueCount(); + java.util.List + getQualifierValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index); + } + public static final class ColumnValue extends + com.google.protobuf.GeneratedMessage + implements ColumnValueOrBuilder { + // Use ColumnValue.newBuilder() to construct. 
+ private ColumnValue(Builder builder) { + super(builder); + } + private ColumnValue(boolean noInit) {} + + private static final ColumnValue defaultInstance; + public static ColumnValue getDefaultInstance() { + return defaultInstance; + } + + public ColumnValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + public interface QualifierValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes qualifier = 1; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + + // optional uint64 timestamp = 3; + boolean hasTimestamp(); + long getTimestamp(); + + // optional .Mutate.DeleteType deleteType = 4; + boolean hasDeleteType(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType(); + } + public static final class QualifierValue extends + com.google.protobuf.GeneratedMessage + implements QualifierValueOrBuilder { + // Use QualifierValue.newBuilder() to construct. 
+ private QualifierValue(Builder builder) { + super(builder); + } + private QualifierValue(boolean noInit) {} + + private static final QualifierValue defaultInstance; + public static QualifierValue getDefaultInstance() { + return defaultInstance; + } + + public QualifierValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes qualifier = 1; + public static final int QUALIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + // optional uint64 timestamp = 3; + public static final int TIMESTAMP_FIELD_NUMBER = 3; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + // optional .Mutate.DeleteType deleteType = 4; + public static final int DELETETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + 
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + + private void initFields() { + qualifier_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, deleteType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, deleteType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + 
private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) obj; + + boolean result = true; + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasDeleteType() == other.hasDeleteType()); + if (hasDeleteType()) { + result = result && + (getDeleteType() == other.getDeleteType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasDeleteType()) { + hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; + hash = (53 * hash) + 
hashEnum(getDeleteType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.deleteType_ = deleteType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasDeleteType()) { + setDeleteType(other.getDeleteType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + qualifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.valueOf(rawValue); + if (value == 
null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + deleteType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // optional bytes qualifier = 1; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // optional uint64 timestamp = 3; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000004; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional 
.Mutate.DeleteType deleteType = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType getDeleteType() { + return deleteType_; + } + public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + deleteType_ = value; + onChanged(); + return this; + } + public Builder clearDeleteType() { + bitField0_ = (bitField0_ & ~0x00000008); + deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.DeleteType.DELETE_ONE_VERSION; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) + } + + static { + defaultInstance = new QualifierValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue.QualifierValue) + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; + private java.util.List qualifierValue_; + public java.util.List getQualifierValueList() { + return qualifierValue_; + } + public java.util.List + getQualifierValueOrBuilderList() { + return qualifierValue_; + } + public int getQualifierValueCount() { + return qualifierValue_.size(); + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + return qualifierValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index) { + return qualifierValue_.get(index); + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifierValue_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifierValue_.size(); i++) { + output.writeMessage(2, qualifierValue_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + for (int i = 0; i < qualifierValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, qualifierValue_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj 
== this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierValueList() + .equals(other.getQualifierValueList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierValueCount() > 0) { + hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; + hash = (53 * hash) + getQualifierValueList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getQualifierValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (qualifierValueBuilder_ == null) { + qualifierValue_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + qualifierValueBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (qualifierValueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); + bitField0_ = 
(bitField0_ & ~0x00000002); + } + result.qualifierValue_ = qualifierValue_; + } else { + result.qualifierValue_ = qualifierValueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (qualifierValueBuilder_ == null) { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValue_.isEmpty()) { + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierValueIsMutable(); + qualifierValue_.addAll(other.qualifierValue_); + } + onChanged(); + } + } else { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValueBuilder_.isEmpty()) { + qualifierValueBuilder_.dispose(); + qualifierValueBuilder_ = null; + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + qualifierValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getQualifierValueFieldBuilder() : null; + } else { + qualifierValueBuilder_.addAllMessages(other.qualifierValue_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addQualifierValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + 
family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + private java.util.List qualifierValue_ = + java.util.Collections.emptyList(); + private void ensureQualifierValueIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = new java.util.ArrayList(qualifierValue_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; + + public java.util.List getQualifierValueList() { + if (qualifierValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(qualifierValue_); + } else { + return qualifierValueBuilder_.getMessageList(); + } + } + public int getQualifierValueCount() { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.size(); + } else { + return qualifierValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); + } else { + return qualifierValueBuilder_.getMessage(index); + } + } + public Builder setQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, value); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setQualifierValue( + int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.add(value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(value); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.add(index, value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addQualifierValue( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public 
Builder addAllQualifierValue( + java.lang.Iterable values) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + super.addAll(values, qualifierValue_); + onChanged(); + } else { + qualifierValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearQualifierValue() { + if (qualifierValueBuilder_ == null) { + qualifierValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + qualifierValueBuilder_.clear(); + } + return this; + } + public Builder removeQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.remove(index); + onChanged(); + } else { + qualifierValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( + int index) { + return getQualifierValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); } else { + return qualifierValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getQualifierValueOrBuilderList() { + if (qualifierValueBuilder_ != null) { + return qualifierValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(qualifierValue_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { + return getQualifierValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( + int 
index) { + return getQualifierValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public java.util.List + getQualifierValueBuilderList() { + return getQualifierValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> + getQualifierValueFieldBuilder() { + if (qualifierValueBuilder_ == null) { + qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( + qualifierValue_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + qualifierValue_ = null; + } + return qualifierValueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) + } + + static { + defaultInstance = new ColumnValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue) + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required .Mutate.MutateType mutateType = 2; + public static final int MUTATETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType 
mutateType_; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + public static final int COLUMNVALUE_FIELD_NUMBER = 3; + private java.util.List columnValue_; + public java.util.List getColumnValueList() { + return columnValue_; + } + public java.util.List + getColumnValueOrBuilderList() { + return columnValue_; + } + public int getColumnValueCount() { + return columnValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { + return columnValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + return columnValue_.get(index); + } + + // repeated .NameBytesPair attribute = 4; + public static final int ATTRIBUTE_FIELD_NUMBER = 4; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional uint64 timestamp = 5; + public static final int TIMESTAMP_FIELD_NUMBER = 5; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + // optional uint64 lockId = 6; + public static final int LOCKID_FIELD_NUMBER = 6; + private long lockId_; + public boolean hasLockId() { + 
return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + + // optional bool writeToWAL = 7 [default = true]; + public static final int WRITETOWAL_FIELD_NUMBER = 7; + private boolean writeToWAL_; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + + // optional .TimeRange timeRange = 10; + public static final int TIMERANGE_FIELD_NUMBER = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + columnValue_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + timestamp_ = 0L; + lockId_ = 0L; + writeToWAL_ = true; + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutateType()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + 
memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + output.writeMessage(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(6, lockId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeMessage(10, timeRange_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(6, lockId_); + } + if (((bitField0_ 
& 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, timeRange_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasMutateType() == other.hasMutateType()); + if (hasMutateType()) { + result = result && + (getMutateType() == other.getMutateType()); + } + result = result && getColumnValueList() + .equals(other.getColumnValueList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasWriteToWAL() == other.hasWriteToWAL()); + if (hasWriteToWAL()) { + result = result && (getWriteToWAL() + == other.getWriteToWAL()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() 
+ .equals(other.getTimeRange()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasMutateType()) { + hash = (37 * hash) + MUTATETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getMutateType()); + } + if (getColumnValueCount() > 0) { + hash = (37 * hash) + COLUMNVALUE_FIELD_NUMBER; + hash = (53 * hash) + getColumnValueList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasWriteToWAL()) { + hash = (37 * hash) + WRITETOWAL_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getWriteToWAL()); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate 
parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnValueFieldBuilder(); + getAttributeFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + 
bitField0_ = (bitField0_ & ~0x00000001); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + bitField0_ = (bitField0_ & ~0x00000002); + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + columnValueBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + attributeBuilder_.clear(); + } + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000010); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000020); + writeToWAL_ = true; + bitField0_ = (bitField0_ & ~0x00000040); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mutateType_ = mutateType_; + if (columnValueBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = java.util.Collections.unmodifiableList(columnValue_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.columnValue_ = columnValue_; + } else { + result.columnValue_ = columnValueBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.writeToWAL_ = writeToWAL_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasMutateType()) { + setMutateType(other.getMutateType()); + } + if (columnValueBuilder_ == null) { + if (!other.columnValue_.isEmpty()) { + if (columnValue_.isEmpty()) { + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureColumnValueIsMutable(); + columnValue_.addAll(other.columnValue_); + } + onChanged(); + } + } else { + if (!other.columnValue_.isEmpty()) { + if (columnValueBuilder_.isEmpty()) { + columnValueBuilder_.dispose(); + columnValueBuilder_ = null; + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + columnValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnValueFieldBuilder() : null; + } else { + columnValueBuilder_.addAllMessages(other.columnValue_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasWriteToWAL()) { + setWriteToWAL(other.getWriteToWAL()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasMutateType()) { + + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + mutateType_ = value; + } + break; + } + case 26: { + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumnValue(subBuilder.buildPartial()); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 40: { + bitField0_ |= 0x00000010; + timestamp_ = input.readUInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + lockId_ = input.readUInt64(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + writeToWAL_ = input.readBool(); + break; + } + case 82: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required .Mutate.MutateType mutateType = 2; + private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + mutateType_ = value; + onChanged(); + return this; + } + public Builder clearMutateType() { + bitField0_ = (bitField0_ & ~0x00000002); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType.APPEND; + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + private java.util.List columnValue_ = + java.util.Collections.emptyList(); + private void ensureColumnValueIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = new java.util.ArrayList(columnValue_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; + + public java.util.List getColumnValueList() { + if (columnValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(columnValue_); + } else { + return columnValueBuilder_.getMessageList(); + } + } + public int getColumnValueCount() { + if (columnValueBuilder_ == null) { + return columnValue_.size(); + } else { + return columnValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue getColumnValue(int index) { + if 
(columnValueBuilder_ == null) { + return columnValue_.get(index); + } else { + return columnValueBuilder_.getMessage(index); + } + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.set(index, value); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.set(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(value); + onChanged(); + } else { + columnValueBuilder_.addMessage(value); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(index, value); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumnValue( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(builderForValue.build()); + onChanged(); + } else { + 
columnValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumnValue( + java.lang.Iterable values) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + super.addAll(values, columnValue_); + onChanged(); + } else { + columnValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumnValue() { + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + columnValueBuilder_.clear(); + } + return this; + } + public Builder removeColumnValue(int index) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.remove(index); + onChanged(); + } else { + columnValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + if (columnValueBuilder_ == null) { + return columnValue_.get(index); } else { + return columnValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnValueOrBuilderList() { + if (columnValueBuilder_ != null) { + return columnValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(columnValue_); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { + return getColumnValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public java.util.List + getColumnValueBuilderList() { + return getColumnValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder> + getColumnValueFieldBuilder() { + if (columnValueBuilder_ == null) { + columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValueOrBuilder>( + columnValue_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + columnValue_ = null; + } + return columnValueBuilder_; + } + + // repeated .NameBytesPair attribute = 4; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if 
(attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if 
(attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 timestamp = 5; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public long getTimestamp() { + return 
timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000010; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000010); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional uint64 lockId = 6; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000020; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000020); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional bool writeToWAL = 7 [default = true]; + private boolean writeToWAL_ = true; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + public Builder setWriteToWAL(boolean value) { + bitField0_ |= 0x00000040; + writeToWAL_ = value; + onChanged(); + return this; + } + public Builder clearWriteToWAL() { + bitField0_ = (bitField0_ & ~0x00000040); + writeToWAL_ = true; + onChanged(); + return this; + } + + // optional .TimeRange timeRange = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return 
timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } 
else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Mutate) + } + + static { + defaultInstance = new Mutate(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate) + } + + public interface MutateRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Mutate mutate = 2; + boolean hasMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder(); + + // optional .Condition condition = 3; + boolean hasCondition(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); + } + public static final class MutateRequest extends + com.google.protobuf.GeneratedMessage + implements MutateRequestOrBuilder { + // Use MutateRequest.newBuilder() to construct. 
+ private MutateRequest(Builder builder) { + super(builder); + } + private MutateRequest(boolean noInit) {} + + private static final MutateRequest defaultInstance; + public static MutateRequest getDefaultInstance() { + return defaultInstance; + } + + public MutateRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Mutate mutate = 2; + public static final int MUTATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + return mutate_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + return mutate_; + } + + // optional .Condition condition = 3; + public static final int CONDITION_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; + public boolean 
hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { + return condition_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { + return condition_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutate()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getMutate().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, condition_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, condition_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMutate() == other.hasMutate()); + if (hasMutate()) { + result = result && getMutate() + .equals(other.getMutate()); + } + result = result && (hasCondition() == other.hasCondition()); + if (hasCondition()) { + result = result && getCondition() + .equals(other.getCondition()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMutate()) { + hash = (37 * hash) + MUTATE_FIELD_NUMBER; + hash = (53 * hash) + getMutate().hashCode(); + } + if 
(hasCondition()) { + hash = (37 * hash) + CONDITION_FIELD_NUMBER; + hash = (53 * hash) + getCondition().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getMutateFieldBuilder(); + getConditionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + if (conditionBuilder_ == null) { + condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (mutateBuilder_ == null) { + result.mutate_ = mutate_; + } else { + result.mutate_ = mutateBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (conditionBuilder_ == null) { + result.condition_ = condition_; + } else { + result.condition_ = conditionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMutate()) { + mergeMutate(other.getMutate()); + } + if (other.hasCondition()) { + mergeCondition(other.getCondition()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasMutate()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getMutate().isInitialized()) { + + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + 
setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(); + if (hasMutate()) { + subBuilder.mergeFrom(getMutate()); + } + input.readMessage(subBuilder, extensionRegistry); + setMutate(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(); + if (hasCondition()) { + subBuilder.mergeFrom(getCondition()); + } + input.readMessage(subBuilder, extensionRegistry); + setCondition(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Mutate mutate = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> mutateBuilder_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate getMutate() { + if (mutateBuilder_ == null) { + return mutate_; + } else { + return mutateBuilder_.getMessage(); + } + } + public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + mutate_ = value; + onChanged(); + } else { + mutateBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setMutate( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder builderForValue) { + if (mutateBuilder_ == null) { + mutate_ = builderForValue.build(); + onChanged(); + } else { + mutateBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + mutate_ != 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance()) { + mutate_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); + } else { + mutate_ = value; + } + onChanged(); + } else { + mutateBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearMutate() { + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance(); + onChanged(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder getMutateBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getMutateFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder getMutateOrBuilder() { + if (mutateBuilder_ != null) { + return mutateBuilder_.getMessageOrBuilder(); + } else { + return mutate_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder> + getMutateFieldBuilder() { + if (mutateBuilder_ == null) { + mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateOrBuilder>( + mutate_, + getParentForChildren(), + isClean()); + mutate_ = null; + } + return mutateBuilder_; + } + + // optional .Condition condition = 3; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; + public boolean hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { + if (conditionBuilder_ == null) { + return condition_; + } else { + return conditionBuilder_.getMessage(); + } + } + public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + condition_ = value; + onChanged(); + } else { + conditionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setCondition( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { + if (conditionBuilder_ == null) { + condition_ = builderForValue.build(); + onChanged(); + } else { + conditionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { + condition_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); + } else { + condition_ = value; + } + onChanged(); + } else { + conditionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearCondition() { + if (conditionBuilder_ == null) { + condition_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); + onChanged(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getConditionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { + if (conditionBuilder_ != null) { + return conditionBuilder_.getMessageOrBuilder(); + } else { + return condition_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> + getConditionFieldBuilder() { + if (conditionBuilder_ == null) { + conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>( + condition_, + getParentForChildren(), + isClean()); + condition_ = null; + } + return conditionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MutateRequest) + } + + static { + defaultInstance = new MutateRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateRequest) + } + + public interface MutateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool processed = 2; + boolean hasProcessed(); + 
boolean getProcessed(); + } + public static final class MutateResponse extends + com.google.protobuf.GeneratedMessage + implements MutateResponseOrBuilder { + // Use MutateResponse.newBuilder() to construct. + private MutateResponse(Builder builder) { + super(builder); + } + private MutateResponse(boolean noInit) {} + + private static final MutateResponse defaultInstance; + public static MutateResponse getDefaultInstance() { + return defaultInstance; + } + + public MutateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool processed = 2; + public static final int PROCESSED_FIELD_NUMBER = 2; + private boolean processed_; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + processed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + 
byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, processed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, processed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasProcessed() == other.hasProcessed()); + if (hasProcessed()) { + result = result && (getProcessed() + == other.getProcessed()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasProcessed()) { + hash = (37 * hash) + PROCESSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getProcessed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + java.io.InputStream input, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class 
Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + processed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() { + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.processed_ = processed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasProcessed()) { + 
setProcessed(other.getProcessed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + processed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } 
else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool processed = 2; + private boolean processed_ ; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + public Builder setProcessed(boolean value) { + bitField0_ |= 0x00000002; + processed_ = value; + onChanged(); + return this; + } + public Builder clearProcessed() { + bitField0_ = (bitField0_ & ~0x00000002); + processed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MutateResponse) + } + + static { + defaultInstance = new MutateResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateResponse) + } + + public interface ScanOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Column column = 1; + java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .NameBytesPair attribute = 2; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); + 
int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index); + + // optional bytes startRow = 3; + boolean hasStartRow(); + com.google.protobuf.ByteString getStartRow(); + + // optional bytes stopRow = 4; + boolean hasStopRow(); + com.google.protobuf.ByteString getStopRow(); + + // optional .NameBytesPair filter = 5; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder(); + + // optional .TimeRange timeRange = 6; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + + // optional uint32 maxVersions = 7 [default = 1]; + boolean hasMaxVersions(); + int getMaxVersions(); + + // optional bool cacheBlocks = 8 [default = true]; + boolean hasCacheBlocks(); + boolean getCacheBlocks(); + + // optional uint32 batchSize = 9; + boolean hasBatchSize(); + int getBatchSize(); + } + public static final class Scan extends + com.google.protobuf.GeneratedMessage + implements ScanOrBuilder { + // Use Scan.newBuilder() to construct. 
+ private Scan(Builder builder) { + super(builder); + } + private Scan(boolean noInit) {} + + private static final Scan defaultInstance; + public static Scan getDefaultInstance() { + return defaultInstance; + } + + public Scan getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Column column = 1; + public static final int COLUMN_FIELD_NUMBER = 1; + private java.util.List column_; + public java.util.List getColumnList() { + return column_; + } + public java.util.List + getColumnOrBuilderList() { + return column_; + } + public int getColumnCount() { + return column_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + return column_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + return column_.get(index); + } + + // repeated .NameBytesPair attribute = 2; + public static final int ATTRIBUTE_FIELD_NUMBER = 2; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional bytes 
startRow = 3; + public static final int STARTROW_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startRow_; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + + // optional bytes stopRow = 4; + public static final int STOPROW_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString stopRow_; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + + // optional .NameBytesPair filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int 
CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + // optional uint32 batchSize = 9; + public static final int BATCHSIZE_FIELD_NUMBER = 9; + private int batchSize_; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getBatchSize() { + return batchSize_; + } + + private void initFields() { + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + startRow_ = com.google.protobuf.ByteString.EMPTY; + stopRow_ = com.google.protobuf.ByteString.EMPTY; + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + batchSize_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(3, 
startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(9, batchSize_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + 
.computeUInt32Size(9, batchSize_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj; + + boolean result = true; + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasStartRow() == other.hasStartRow()); + if (hasStartRow()) { + result = result && getStartRow() + .equals(other.getStartRow()); + } + result = result && (hasStopRow() == other.hasStopRow()); + if (hasStopRow()) { + result = result && getStopRow() + .equals(other.getStopRow()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && (hasBatchSize() == other.hasBatchSize()); + if (hasBatchSize()) { + result = result && (getBatchSize() + == other.getBatchSize()); + } + result = 
result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasStartRow()) { + hash = (37 * hash) + STARTROW_FIELD_NUMBER; + hash = (53 * hash) + getStartRow().hashCode(); + } + if (hasStopRow()) { + hash = (37 * hash) + STOPROW_FIELD_NUMBER; + hash = (53 * hash) + getStopRow().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + if (hasBatchSize()) { + hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; + hash = (53 * hash) + getBatchSize(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (columnBuilder_ == null) { + column_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + attributeBuilder_.clear(); + } + startRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + stopRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + batchSize_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000001; + } + result.startRow_ = startRow_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.stopRow_ = stopRow_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) 
== 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000040; + } + result.batchSize_ = batchSize_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if (columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasStartRow()) { + setStartRow(other.getStartRow()); + } + if (other.hasStopRow()) { + setStopRow(other.getStopRow()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + if (other.hasBatchSize()) { + setBatchSize(other.getBatchSize()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + 
com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumn(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startRow_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + stopRow_ = input.readBytes(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 
0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + batchSize_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Column column = 1; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); + onChanged(); + } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + 
return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + 
columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + column_ = null; + } 
+ return columnBuilder_; + } + + // repeated .NameBytesPair attribute = 2; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder 
addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) 
{ + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + attribute_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional bytes startRow = 3; + private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + public Builder setStartRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + startRow_ = value; + onChanged(); + return this; + } + public Builder clearStartRow() { + bitField0_ = (bitField0_ & ~0x00000004); + startRow_ = getDefaultInstance().getStartRow(); + onChanged(); + return this; + } + + // optional bytes stopRow = 4; + private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + public Builder setStopRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + stopRow_ = value; + onChanged(); + return this; + } + public Builder clearStopRow() { + bitField0_ = (bitField0_ & ~0x00000008); + stopRow_ = getDefaultInstance().getStopRow(); + onChanged(); + return this; + } + + // optional .NameBytesPair filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder 
setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & ~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // optional uint32 batchSize = 9; + private int batchSize_ ; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getBatchSize() { + return batchSize_; + } + public Builder setBatchSize(int value) { + 
bitField0_ |= 0x00000100; + batchSize_ = value; + onChanged(); + return this; + } + public Builder clearBatchSize() { + bitField0_ = (bitField0_ & ~0x00000100); + batchSize_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Scan) + } + + static { + defaultInstance = new Scan(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Scan) + } + + public interface ScanRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional .Scan scan = 2; + boolean hasScan(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); + + // optional uint64 scannerId = 3; + boolean hasScannerId(); + long getScannerId(); + + // optional uint32 numberOfRows = 4; + boolean hasNumberOfRows(); + int getNumberOfRows(); + + // optional bool closeScanner = 5; + boolean hasCloseScanner(); + boolean getCloseScanner(); + } + public static final class ScanRequest extends + com.google.protobuf.GeneratedMessage + implements ScanRequestOrBuilder { + // Use ScanRequest.newBuilder() to construct. 
+ private ScanRequest(Builder builder) { + super(builder); + } + private ScanRequest(boolean noInit) {} + + private static final ScanRequest defaultInstance; + public static ScanRequest getDefaultInstance() { + return defaultInstance; + } + + public ScanRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + private int bitField0_; + // optional .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional .Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional uint64 scannerId = 3; + public static final int SCANNERID_FIELD_NUMBER = 3; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getScannerId() { + 
return scannerId_; + } + + // optional uint32 numberOfRows = 4; + public static final int NUMBEROFROWS_FIELD_NUMBER = 4; + private int numberOfRows_; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getNumberOfRows() { + return numberOfRows_; + } + + // optional bool closeScanner = 5; + public static final int CLOSESCANNER_FIELD_NUMBER = 5; + private boolean closeScanner_; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getCloseScanner() { + return closeScanner_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + scannerId_ = 0L; + numberOfRows_ = 0; + closeScanner_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasRegion()) { + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasScan()) { + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, scannerId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt32(4, numberOfRows_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, closeScanner_); + } + getUnknownFields().writeTo(output); + } + + private int 
memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, scannerId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, numberOfRows_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, closeScanner_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + 
} + result = result && (hasNumberOfRows() == other.hasNumberOfRows()); + if (hasNumberOfRows()) { + result = result && (getNumberOfRows() + == other.getNumberOfRows()); + } + result = result && (hasCloseScanner() == other.hasCloseScanner()); + if (hasCloseScanner()) { + result = result && (getCloseScanner() + == other.getCloseScanner()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasNumberOfRows()) { + hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; + hash = (53 * hash) + getNumberOfRows(); + } + if (hasCloseScanner()) { + hash = (37 * hash) + CLOSESCANNER_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCloseScanner()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + numberOfRows_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + closeScanner_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.numberOfRows_ = numberOfRows_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.closeScanner_ = closeScanner_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasNumberOfRows()) { + setNumberOfRows(other.getNumberOfRows()); + } + if (other.hasCloseScanner()) { + setCloseScanner(other.getCloseScanner()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean 
isInitialized() { + if (hasRegion()) { + if (!getRegion().isInitialized()) { + + return false; + } + } + if (hasScan()) { + if (!getScan().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(); + if (hasScan()) { + subBuilder.mergeFrom(getScan()); + } + input.readMessage(subBuilder, extensionRegistry); + setScan(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + scannerId_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + numberOfRows_ = input.readUInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + closeScanner_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + 
region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional .Scan scan = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return 
scan_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // optional uint64 scannerId = 3; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000004; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000004); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 numberOfRows = 4; + private int numberOfRows_ ; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getNumberOfRows() { + return numberOfRows_; + } + public Builder setNumberOfRows(int value) { + bitField0_ |= 0x00000008; + numberOfRows_ = value; + onChanged(); + return this; + } + public Builder clearNumberOfRows() { + bitField0_ = (bitField0_ & ~0x00000008); + numberOfRows_ = 0; + onChanged(); + return this; + } + + // optional bool closeScanner = 5; + private boolean closeScanner_ ; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getCloseScanner() { + return closeScanner_; + } + public Builder setCloseScanner(boolean value) { + bitField0_ |= 0x00000010; + closeScanner_ = value; + 
onChanged(); + return this; + } + public Builder clearCloseScanner() { + bitField0_ = (bitField0_ & ~0x00000010); + closeScanner_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanRequest) + } + + static { + defaultInstance = new ScanRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanRequest) + } + + public interface ScanResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Result result = 1; + java.util.List + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index); + int getResultCount(); + java.util.List + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index); + + // optional uint64 scannerId = 2; + boolean hasScannerId(); + long getScannerId(); + + // optional bool moreResults = 3; + boolean hasMoreResults(); + boolean getMoreResults(); + + // optional uint32 ttl = 4; + boolean hasTtl(); + int getTtl(); + } + public static final class ScanResponse extends + com.google.protobuf.GeneratedMessage + implements ScanResponseOrBuilder { + // Use ScanResponse.newBuilder() to construct. 
+ private ScanResponse(Builder builder) { + super(builder); + } + private ScanResponse(boolean noInit) {} + + private static final ScanResponse defaultInstance; + public static ScanResponse getDefaultInstance() { + return defaultInstance; + } + + public ScanResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; + } + public java.util.List + getResultOrBuilderList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { + return result_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + // optional uint64 scannerId = 2; + public static final int SCANNERID_FIELD_NUMBER = 2; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getScannerId() { + return scannerId_; + } + + // optional bool moreResults = 3; + public static final int MORERESULTS_FIELD_NUMBER = 3; + private boolean moreResults_; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMoreResults() { + return moreResults_; + } + + // optional uint32 ttl = 4; + public static final int TTL_FIELD_NUMBER = 4; + 
private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + scannerId_ = 0L; + moreResults_ = false; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(4, ttl_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } + result = result && (hasMoreResults() == other.hasMoreResults()); + if (hasMoreResults()) { + result = result && (getMoreResults() + == other.getMoreResults()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasMoreResults()) { + hash = (37 * hash) + MORERESULTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMoreResults()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = 
newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + 
} + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + moreResults_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.moreResults_ = moreResults_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasMoreResults()) { + setMoreResults(other.getMoreResults()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + scannerId_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + moreResults_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Result result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 
0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; + + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) 
{ + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return 
resultBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()); + } + public java.util.List + getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional uint64 scannerId = 2; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000002; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + 
bitField0_ = (bitField0_ & ~0x00000002); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional bool moreResults = 3; + private boolean moreResults_ ; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getMoreResults() { + return moreResults_; + } + public Builder setMoreResults(boolean value) { + bitField0_ |= 0x00000004; + moreResults_ = value; + onChanged(); + return this; + } + public Builder clearMoreResults() { + bitField0_ = (bitField0_ & ~0x00000004); + moreResults_ = false; + onChanged(); + return this; + } + + // optional uint32 ttl = 4; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000008; + ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000008); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanResponse) + } + + static { + defaultInstance = new ScanResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanResponse) + } + + public interface LockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes row = 2; + java.util.List getRowList(); + int getRowCount(); + com.google.protobuf.ByteString getRow(int index); + } + public static final class LockRowRequest extends + com.google.protobuf.GeneratedMessage + implements LockRowRequestOrBuilder { + // Use LockRowRequest.newBuilder() to construct. 
+ private LockRowRequest(Builder builder) { + super(builder); + } + private LockRowRequest(boolean noInit) {} + + private static final LockRowRequest defaultInstance; + public static LockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public LockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes row = 2; + public static final int ROW_FIELD_NUMBER = 2; + private java.util.List row_; + public java.util.List + getRowList() { + return row_; + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + row_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + 
if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < row_.size(); i++) { + output.writeBytes(2, row_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < row_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(row_.get(i)); + } + size += dataSize; + size += 1 * getRowList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getRowList() + 
.equals(other.getRowList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getRowCount() > 0) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRowList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = java.util.Collections.unmodifiableList(row_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.row_ = row_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest 
other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.row_.isEmpty()) { + if (row_.isEmpty()) { + row_ = other.row_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureRowIsMutable(); + row_.addAll(other.row_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureRowIsMutable(); + row_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + 
private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated bytes row = 2; + private java.util.List row_ = java.util.Collections.emptyList();; + private void ensureRowIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = new java.util.ArrayList(row_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getRowList() { + return java.util.Collections.unmodifiableList(row_); + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + public Builder setRow( + int index, com.google.protobuf.ByteString value) { + if (value == null) { 
+ throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.set(index, value); + onChanged(); + return this; + } + public Builder addRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.add(value); + onChanged(); + return this; + } + public Builder addAllRow( + java.lang.Iterable values) { + ensureRowIsMutable(); + super.addAll(values, row_); + onChanged(); + return this; + } + public Builder clearRow() { + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowRequest) + } + + static { + defaultInstance = new LockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowRequest) + } + + public interface LockRowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lockId = 1; + boolean hasLockId(); + long getLockId(); + + // optional uint32 ttl = 2; + boolean hasTtl(); + int getTtl(); + } + public static final class LockRowResponse extends + com.google.protobuf.GeneratedMessage + implements LockRowResponseOrBuilder { + // Use LockRowResponse.newBuilder() to construct. 
+ private LockRowResponse(Builder builder) { + super(builder); + } + private LockRowResponse(boolean noInit) {} + + private static final LockRowResponse defaultInstance; + public static LockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public LockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lockId = 1; + public static final int LOCKID_FIELD_NUMBER = 1; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + + // optional uint32 ttl = 2; + public static final int TTL_FIELD_NUMBER = 2; + private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + lockId_ = 0L; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLockId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ttl_); + } + getUnknownFields().writeTo(output); + } + + private int 
memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) obj; + + boolean result = true; + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()) return this; + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLockId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + 
extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lockId_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lockId = 1; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000001; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 ttl = 2; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000002; + ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000002); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowResponse) + } + + static { + defaultInstance = new LockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowResponse) + } + + public interface UnlockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required uint64 lockId = 2; + boolean hasLockId(); + long getLockId(); + } + public static final class UnlockRowRequest extends + com.google.protobuf.GeneratedMessage + implements UnlockRowRequestOrBuilder { + // Use 
UnlockRowRequest.newBuilder() to construct. + private UnlockRowRequest(Builder builder) { + super(builder); + } + private UnlockRowRequest(boolean noInit) {} + + private static final UnlockRowRequest defaultInstance; + public static UnlockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required uint64 lockId = 2; + public static final int LOCKID_FIELD_NUMBER = 2; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + lockId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + 
memoizedIsInitialized = 0; + return false; + } + if (!hasLockId()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, lockId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, lockId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + 
result = result && (getLockId() + == other.getLockId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasLockId()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + lockId_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + 
regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required uint64 lockId = 2; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000002; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000002); + lockId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UnlockRowRequest) + } + + static { + defaultInstance = new UnlockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowRequest) + } + + public interface UnlockRowResponseOrBuilder + 
extends com.google.protobuf.MessageOrBuilder { + } + public static final class UnlockRowResponse extends + com.google.protobuf.GeneratedMessage + implements UnlockRowResponseOrBuilder { + // Use UnlockRowResponse.newBuilder() to construct. + private UnlockRowResponse(Builder builder) { + super(builder); + } + private UnlockRowResponse(boolean noInit) {} + + private static final UnlockRowResponse defaultInstance; + public static UnlockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + 
if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); + 
if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if 
(!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:UnlockRowResponse) + } + + static { + defaultInstance = new UnlockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowResponse) + } + + public interface BulkLoadHFileRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + java.util.List + getFamilyPathList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); + int getFamilyPathCount(); + java.util.List + getFamilyPathOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index); + } + public static final class BulkLoadHFileRequest extends + com.google.protobuf.GeneratedMessage + implements BulkLoadHFileRequestOrBuilder { + // Use BulkLoadHFileRequest.newBuilder() to construct. 
+ private BulkLoadHFileRequest(Builder builder) { + super(builder); + } + private BulkLoadHFileRequest(boolean noInit) {} + + private static final BulkLoadHFileRequest defaultInstance; + public static BulkLoadHFileRequest getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadHFileRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + } + + public interface FamilyPathOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required string path = 2; + boolean hasPath(); + String getPath(); + } + public static final class FamilyPath extends + com.google.protobuf.GeneratedMessage + implements FamilyPathOrBuilder { + // Use FamilyPath.newBuilder() to construct. 
+ private FamilyPath(Builder builder) { + super(builder); + } + private FamilyPath(boolean noInit) {} + + private static final FamilyPath defaultInstance; + public static FamilyPath getDefaultInstance() { + return defaultInstance; + } + + public FamilyPath getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required string path = 2; + public static final int PATH_FIELD_NUMBER = 2; + private java.lang.Object path_; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + path_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + family_ = 
com.google.protobuf.ByteString.EMPTY; + path_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPath()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getPathBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getPathBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj; + + boolean result = 
true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasPath() == other.hasPath()); + if (hasPath()) { + result = result && getPath() + .equals(other.getPath()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasPath()) { + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + 
.buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return 
newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + path_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.path_ = path_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasPath()) { + setPath(other.getPath()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasPath()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + path_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { 
+ throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required string path = 2; + private java.lang.Object path_ = ""; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + path_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setPath(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + return this; + } + public Builder clearPath() { + bitField0_ = (bitField0_ & ~0x00000002); + path_ = getDefaultInstance().getPath(); + onChanged(); + return this; + } + void setPath(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) + } + + static { + defaultInstance = new FamilyPath(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + public static 
final int FAMILYPATH_FIELD_NUMBER = 2; + private java.util.List familyPath_; + public java.util.List getFamilyPathList() { + return familyPath_; + } + public java.util.List + getFamilyPathOrBuilderList() { + return familyPath_; + } + public int getFamilyPathCount() { + return familyPath_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + return familyPath_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + return familyPath_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + familyPath_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + output.writeMessage(2, familyPath_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + 
.computeMessageSize(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyPath_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getFamilyPathList() + .equals(other.getFamilyPathList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getFamilyPathCount() > 0) { + hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; + hash = (53 * hash) + getFamilyPathList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + 
if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder() + private Builder() 
{ + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getFamilyPathFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyPathBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (familyPathBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = java.util.Collections.unmodifiableList(familyPath_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyPath_ = familyPath_; + } else { + result.familyPath_ = familyPathBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (familyPathBuilder_ == null) { + if (!other.familyPath_.isEmpty()) { + if (familyPath_.isEmpty()) { + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyPathIsMutable(); + familyPath_.addAll(other.familyPath_); + } + onChanged(); + } + } else { + if 
(!other.familyPath_.isEmpty()) { + if (familyPathBuilder_.isEmpty()) { + familyPathBuilder_.dispose(); + familyPathBuilder_ = null; + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + familyPathBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyPathFieldBuilder() : null; + } else { + familyPathBuilder_.addAllMessages(other.familyPath_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyPath(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + private java.util.List familyPath_ = + java.util.Collections.emptyList(); + private void 
ensureFamilyPathIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = new java.util.ArrayList(familyPath_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; + + public java.util.List getFamilyPathList() { + if (familyPathBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyPath_); + } else { + return familyPathBuilder_.getMessageList(); + } + } + public int getFamilyPathCount() { + if (familyPathBuilder_ == null) { + return familyPath_.size(); + } else { + return familyPathBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); + } else { + return familyPathBuilder_.getMessage(index); + } + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.set(index, value); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.set(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder 
addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(value); + onChanged(); + } else { + familyPathBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(index, value); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyPath( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyPath( + java.lang.Iterable values) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + super.addAll(values, familyPath_); + onChanged(); + } else { + familyPathBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyPath() { + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + 
familyPathBuilder_.clear(); + } + return this; + } + public Builder removeFamilyPath(int index) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.remove(index); + onChanged(); + } else { + familyPathBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); } else { + return familyPathBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFamilyPathOrBuilderList() { + if (familyPathBuilder_ != null) { + return familyPathBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyPath_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { + return getFamilyPathFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public java.util.List + getFamilyPathBuilderList() { + return getFamilyPathFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + getFamilyPathFieldBuilder() { + if (familyPathBuilder_ == null) { + familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( + familyPath_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyPath_ = null; + } + return familyPathBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) + } + + static { + defaultInstance = new BulkLoadHFileRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) + } + + public interface BulkLoadHFileResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool loaded = 1; + boolean hasLoaded(); + boolean getLoaded(); + } + public static final class BulkLoadHFileResponse extends + com.google.protobuf.GeneratedMessage + implements BulkLoadHFileResponseOrBuilder { + // Use BulkLoadHFileResponse.newBuilder() to construct. 
+ private BulkLoadHFileResponse(Builder builder) { + super(builder); + } + private BulkLoadHFileResponse(boolean noInit) {} + + private static final BulkLoadHFileResponse defaultInstance; + public static BulkLoadHFileResponse getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadHFileResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool loaded = 1; + public static final int LOADED_FIELD_NUMBER = 1; + private boolean loaded_; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + + private void initFields() { + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + 
size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + 
return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + if (other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + loaded_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // 
@@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) + } + + static { + defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) + } + + public interface ExecOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required string protocolName = 2; + boolean hasProtocolName(); + String getProtocolName(); + + // required string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // repeated .NameStringPair property = 4; + java.util.List + getPropertyList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index); + int getPropertyCount(); + java.util.List + getPropertyOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index); + + // repeated .NameBytesPair parameter = 5; + java.util.List + getParameterList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index); + int getParameterCount(); + java.util.List + getParameterOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index); + } + public static final class Exec extends + com.google.protobuf.GeneratedMessage + implements ExecOrBuilder { + // Use Exec.newBuilder() to construct. 
+ private Exec(Builder builder) { + super(builder); + } + private Exec(boolean noInit) {} + + private static final Exec defaultInstance; + public static Exec getDefaultInstance() { + return defaultInstance; + } + + public Exec getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required string protocolName = 2; + public static final int PROTOCOLNAME_FIELD_NUMBER = 2; + private java.lang.Object protocolName_; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + protocolName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getProtocolNameBytes() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + protocolName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string methodName = 3; + public static final int 
METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // repeated .NameStringPair property = 4; + public static final int PROPERTY_FIELD_NUMBER = 4; + private java.util.List property_; + public java.util.List getPropertyList() { + return property_; + } + public java.util.List + getPropertyOrBuilderList() { + return property_; + } + public int getPropertyCount() { + return property_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { + return property_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index) { + return property_.get(index); + } + + // repeated .NameBytesPair parameter = 5; + public static final int PARAMETER_FIELD_NUMBER = 5; + private java.util.List parameter_; + public java.util.List getParameterList() { + return parameter_; + } + public java.util.List + getParameterOrBuilderList() { + return parameter_; + } + public int getParameterCount() { + return parameter_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { + return parameter_.get(index); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index) { + return parameter_.get(index); + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + protocolName_ = ""; + methodName_ = ""; + property_ = java.util.Collections.emptyList(); + parameter_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasProtocolName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + output.writeMessage(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + output.writeMessage(5, parameter_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, parameter_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasProtocolName() == other.hasProtocolName()); + if (hasProtocolName()) { + result = result && getProtocolName() + .equals(other.getProtocolName()); + } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && getPropertyList() + .equals(other.getPropertyList()); + result = result && getParameterList() + .equals(other.getParameterList()); + result = 
result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasProtocolName()) { + hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER; + hash = (53 * hash) + getProtocolName().hashCode(); + } + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (getPropertyCount() > 0) { + hash = (37 * hash) + PROPERTY_FIELD_NUMBER; + hash = (53 * hash) + getPropertyList().hashCode(); + } + if (getParameterCount() > 0) { + hash = (37 * hash) + PARAMETER_FIELD_NUMBER; + hash = (53 * hash) + getParameterList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec prototype) { + return 
newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPropertyFieldBuilder(); + getParameterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + protocolName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + propertyBuilder_.clear(); + } + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + parameterBuilder_.clear(); + } + return this; + 
} + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.protocolName_ = protocolName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (propertyBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = java.util.Collections.unmodifiableList(property_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.property_ = 
property_; + } else { + result.property_ = propertyBuilder_.build(); + } + if (parameterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = java.util.Collections.unmodifiableList(parameter_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.parameter_ = parameter_; + } else { + result.parameter_ = parameterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasProtocolName()) { + setProtocolName(other.getProtocolName()); + } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (propertyBuilder_ == null) { + if (!other.property_.isEmpty()) { + if (property_.isEmpty()) { + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensurePropertyIsMutable(); + property_.addAll(other.property_); + } + onChanged(); + } + } else { + if (!other.property_.isEmpty()) { + if (propertyBuilder_.isEmpty()) { + propertyBuilder_.dispose(); + propertyBuilder_ = null; + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + propertyBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getPropertyFieldBuilder() : null; + } else { + propertyBuilder_.addAllMessages(other.property_); + } + } + } + if (parameterBuilder_ == null) { + if (!other.parameter_.isEmpty()) { + if (parameter_.isEmpty()) { + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureParameterIsMutable(); + parameter_.addAll(other.parameter_); + } + onChanged(); + } + } else { + if (!other.parameter_.isEmpty()) { + if (parameterBuilder_.isEmpty()) { + parameterBuilder_.dispose(); + parameterBuilder_ = null; + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + parameterBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getParameterFieldBuilder() : null; + } else { + parameterBuilder_.addAllMessages(other.parameter_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasProtocolName()) { + + return false; + } + if (!hasMethodName()) { + + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 
0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + protocolName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addProperty(subBuilder.buildPartial()); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addParameter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required string protocolName = 2; + private java.lang.Object protocolName_ = ""; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + protocolName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setProtocolName(String value) { + if (value == null) { + 
throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + return this; + } + public Builder clearProtocolName() { + bitField0_ = (bitField0_ & ~0x00000002); + protocolName_ = getDefaultInstance().getProtocolName(); + onChanged(); + return this; + } + void setProtocolName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + } + + // required string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // repeated .NameStringPair property = 4; + private java.util.List property_ = + java.util.Collections.emptyList(); + private void ensurePropertyIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = new java.util.ArrayList(property_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> propertyBuilder_; + + public java.util.List getPropertyList() 
{ + if (propertyBuilder_ == null) { + return java.util.Collections.unmodifiableList(property_); + } else { + return propertyBuilder_.getMessageList(); + } + } + public int getPropertyCount() { + if (propertyBuilder_ == null) { + return property_.size(); + } else { + return propertyBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) { + if (propertyBuilder_ == null) { + return property_.get(index); + } else { + return propertyBuilder_.getMessage(index); + } + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.set(index, value); + onChanged(); + } else { + propertyBuilder_.setMessage(index, value); + } + return this; + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.set(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(value); + onChanged(); + } else { + propertyBuilder_.addMessage(value); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(index, value); + onChanged(); + } else { + propertyBuilder_.addMessage(index, value); + } + return this; + } 
+ public Builder addProperty( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllProperty( + java.lang.Iterable values) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + super.addAll(values, property_); + onChanged(); + } else { + propertyBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearProperty() { + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + propertyBuilder_.clear(); + } + return this; + } + public Builder removeProperty(int index) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.remove(index); + onChanged(); + } else { + propertyBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getPropertyBuilder( + int index) { + return getPropertyFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder( + int index) { + if (propertyBuilder_ == null) { + return property_.get(index); } else { + return propertyBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPropertyOrBuilderList() { + if (propertyBuilder_ != null) { + return propertyBuilder_.getMessageOrBuilderList(); + } 
else { + return java.util.Collections.unmodifiableList(property_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder() { + return getPropertyFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder( + int index) { + return getPropertyFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); + } + public java.util.List + getPropertyBuilderList() { + return getPropertyFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> + getPropertyFieldBuilder() { + if (propertyBuilder_ == null) { + propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( + property_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + property_ = null; + } + return propertyBuilder_; + } + + // repeated .NameBytesPair parameter = 5; + private java.util.List parameter_ = + java.util.Collections.emptyList(); + private void ensureParameterIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = new java.util.ArrayList(parameter_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> parameterBuilder_; + + public java.util.List getParameterList() { + if (parameterBuilder_ == null) { + return java.util.Collections.unmodifiableList(parameter_); + } else { + return parameterBuilder_.getMessageList(); + } + } + public int getParameterCount() { + if (parameterBuilder_ == null) { + return parameter_.size(); + } else { + return parameterBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) { + if (parameterBuilder_ == null) { + return parameter_.get(index); + } else { + return parameterBuilder_.getMessage(index); + } + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.set(index, value); + onChanged(); + } else { + parameterBuilder_.setMessage(index, value); + } + return this; + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.set(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(value); + onChanged(); + } else { + parameterBuilder_.addMessage(value); + } + return this; + } + public Builder addParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if 
(parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(index, value); + onChanged(); + } else { + parameterBuilder_.addMessage(index, value); + } + return this; + } + public Builder addParameter( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllParameter( + java.lang.Iterable values) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + super.addAll(values, parameter_); + onChanged(); + } else { + parameterBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearParameter() { + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + parameterBuilder_.clear(); + } + return this; + } + public Builder removeParameter(int index) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.remove(index); + onChanged(); + } else { + parameterBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getParameterBuilder( + int index) { + return getParameterFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder( + int index) { + if 
(parameterBuilder_ == null) { + return parameter_.get(index); } else { + return parameterBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getParameterOrBuilderList() { + if (parameterBuilder_ != null) { + return parameterBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(parameter_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder() { + return getParameterFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder( + int index) { + return getParameterFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getParameterBuilderList() { + return getParameterFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getParameterFieldBuilder() { + if (parameterBuilder_ == null) { + parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + parameter_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + parameter_ = null; + } + return parameterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Exec) + } + + static { + defaultInstance = new Exec(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Exec) + } + 
+ public interface ExecCoprocessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Exec call = 2; + boolean hasCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder(); + } + public static final class ExecCoprocessorRequest extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorRequestOrBuilder { + // Use ExecCoprocessorRequest.newBuilder() to construct. + private ExecCoprocessorRequest(Builder builder) { + super(builder); + } + private ExecCoprocessorRequest(boolean noInit) {} + + private static final ExecCoprocessorRequest defaultInstance; + public static ExecCoprocessorRequest getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + 
return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Exec call = 2; + public static final int CALL_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { + return call_; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { + return call_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCall()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCall().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, call_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, call_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasCall() == other.hasCall()); + if (hasCall()) { + result = result && getCall() + .equals(other.getCall()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasCall()) { + hash = (37 * hash) + CALL_FIELD_NUMBER; + hash = (53 * hash) + getCall().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getCallFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (callBuilder_ == null) { + result.call_ = call_; + } else { + result.call_ = callBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasCall()) { + mergeCall(other.getCall()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasCall()) { + + return false; + } + if 
(!getRegion().isInitialized()) { + + return false; + } + if (!getCall().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(); + if (hasCall()) { + subBuilder.mergeFrom(getCall()); + } + input.readMessage(subBuilder, extensionRegistry); + setCall(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Exec call = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> callBuilder_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() { + if (callBuilder_ == null) { + return call_; + } else { + return callBuilder_.getMessage(); + } + } + public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if 
(callBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + call_ = value; + onChanged(); + } else { + callBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setCall( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) { + if (callBuilder_ == null) { + call_ = builderForValue.build(); + onChanged(); + } else { + callBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) { + if (callBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) { + call_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); + } else { + call_ = value; + } + onChanged(); + } else { + callBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearCall() { + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance(); + onChanged(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getCallBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCallFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() { + if (callBuilder_ != null) { + return callBuilder_.getMessageOrBuilder(); + } else { + return call_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> + 
getCallFieldBuilder() { + if (callBuilder_ == null) { + callBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>( + call_, + getParentForChildren(), + isClean()); + call_ = null; + } + return callBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest) + } + + static { + defaultInstance = new ExecCoprocessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest) + } + + public interface ExecCoprocessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .NameBytesPair value = 1; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + } + public static final class ExecCoprocessorResponse extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorResponseOrBuilder { + // Use ExecCoprocessorResponse.newBuilder() to construct. 
+ private ExecCoprocessorResponse(Builder builder) { + super(builder); + } + private ExecCoprocessorResponse(boolean noInit) {} + + private static final ExecCoprocessorResponse defaultInstance; + public static ExecCoprocessorResponse getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + private void initFields() { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + 
getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } 
+ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasValue()) { + + return false; + } + if (!getValue().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if 
(!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; 
+ } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return 
valueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse) + } + + static { + defaultInstance = new ExecCoprocessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) + } + + public interface ActionResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .NameBytesPair value = 1; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + + // optional .NameBytesPair exception = 2; + boolean hasException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); + } + public static final class ActionResult extends + com.google.protobuf.GeneratedMessage + implements ActionResultOrBuilder { + // Use ActionResult.newBuilder() to construct. 
+ private ActionResult(Builder builder) { + super(builder); + } + private ActionResult(boolean noInit) {} + + private static final ActionResult defaultInstance; + public static ActionResult getDefaultInstance() { + return defaultInstance; + } + + public ActionResult getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + private int bitField0_; + // optional .NameBytesPair value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + return value_; + } + + // optional .NameBytesPair exception = 2; + public static final int EXCEPTION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + return exception_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + return exception_; + } + + private void initFields() { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + exception_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasValue()) { + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, exception_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, exception_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other = 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) obj; + + boolean result = true; + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ActionResult_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); 
+ return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + 
} + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasValue()) { + if (!getValue().isInitialized()) { + + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + case 18: { 
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + if (hasException()) { + subBuilder.mergeFrom(getException()); + } + input.readMessage(subBuilder, extensionRegistry); + setException(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // optional .NameBytesPair value = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 
0x00000001) == 0x00000001) && + value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // optional .NameBytesPair exception = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder 
clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ActionResult) + } + + static { + defaultInstance = new ActionResult(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ActionResult) + } + + public interface MultiRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated .NameBytesPair action = 2; + java.util.List + getActionList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index); + int getActionCount(); + java.util.List + getActionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( + int index); + + // optional bool atomic = 3; + boolean hasAtomic(); + boolean getAtomic(); + } + public static final class MultiRequest extends + com.google.protobuf.GeneratedMessage + implements MultiRequestOrBuilder { + // Use MultiRequest.newBuilder() to construct. + private MultiRequest(Builder builder) { + super(builder); + } + private MultiRequest(boolean noInit) {} + + private static final MultiRequest defaultInstance; + public static MultiRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return 
region_; + } + + // repeated .NameBytesPair action = 2; + public static final int ACTION_FIELD_NUMBER = 2; + private java.util.List action_; + public java.util.List getActionList() { + return action_; + } + public java.util.List + getActionOrBuilderList() { + return action_; + } + public int getActionCount() { + return action_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index) { + return action_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( + int index) { + return action_.get(index); + } + + // optional bool atomic = 3; + public static final int ATOMIC_FIELD_NUMBER = 3; + private boolean atomic_; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getAtomic() { + return atomic_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + action_ = java.util.Collections.emptyList(); + atomic_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + output.writeMessage(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, 
atomic_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + for (int i = 0; i < action_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, action_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, atomic_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getActionList() + .equals(other.getActionList()); + result = result && (hasAtomic() == other.hasAtomic()); + if (hasAtomic()) { + result = result && (getAtomic() + == other.getAtomic()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; 
+ hash = (53 * hash) + getRegion().hashCode(); + } + if (getActionCount() > 0) { + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + getActionList().hashCode(); + } + if (hasAtomic()) { + hash = (37 * hash) + ATOMIC_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getAtomic()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder { + public 
static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getActionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (actionBuilder_ == null) { + action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + actionBuilder_.clear(); + } + atomic_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() { 
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (actionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = java.util.Collections.unmodifiableList(action_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.action_ = action_; + } else { + result.action_ = actionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.atomic_ = atomic_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (actionBuilder_ == null) { + if (!other.action_.isEmpty()) { + if (action_.isEmpty()) { + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureActionIsMutable(); + action_.addAll(other.action_); + } + onChanged(); + } + } else { + if (!other.action_.isEmpty()) { + if (actionBuilder_.isEmpty()) { + actionBuilder_.dispose(); + actionBuilder_ = null; + action_ = other.action_; + bitField0_ = (bitField0_ & ~0x00000002); + actionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getActionFieldBuilder() : null; + } else { + actionBuilder_.addAllMessages(other.action_); + } + } + } + if (other.hasAtomic()) { + setAtomic(other.getAtomic()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getActionCount(); i++) { + if (!getAction(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAction(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + atomic_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .NameBytesPair action = 2; + private java.util.List action_ = + java.util.Collections.emptyList(); + private void ensureActionIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + action_ = new java.util.ArrayList(action_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> actionBuilder_; + + public java.util.List getActionList() { + if (actionBuilder_ == null) { + return java.util.Collections.unmodifiableList(action_); + } else { + return actionBuilder_.getMessageList(); + } + } + public int getActionCount() { + if (actionBuilder_ == null) { + return action_.size(); + } else { + return actionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAction(int index) { + if (actionBuilder_ == null) { + return action_.get(index); + } else { + return actionBuilder_.getMessage(index); + } + } + public Builder setAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.set(index, value); + onChanged(); + } else { + actionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAction( + int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.set(index, builderForValue.build()); + onChanged(); + } else { + actionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.add(value); + onChanged(); + } else { + actionBuilder_.addMessage(value); + } + return this; + } + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (actionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureActionIsMutable(); + action_.add(index, value); + onChanged(); + } else { + actionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAction( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(builderForValue.build()); + onChanged(); + } else { + actionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAction( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.add(index, builderForValue.build()); + onChanged(); + } else { + actionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAction( + java.lang.Iterable values) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + super.addAll(values, action_); + onChanged(); + } else { + actionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAction() { + if (actionBuilder_ == null) { + 
action_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + actionBuilder_.clear(); + } + return this; + } + public Builder removeAction(int index) { + if (actionBuilder_ == null) { + ensureActionIsMutable(); + action_.remove(index); + onChanged(); + } else { + actionBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getActionBuilder( + int index) { + return getActionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getActionOrBuilder( + int index) { + if (actionBuilder_ == null) { + return action_.get(index); } else { + return actionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getActionOrBuilderList() { + if (actionBuilder_ != null) { + return actionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(action_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addActionBuilder() { + return getActionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addActionBuilder( + int index) { + return getActionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); + } + public java.util.List + getActionBuilderList() { + return getActionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getActionFieldBuilder() { + if (actionBuilder_ == null) { + actionBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + action_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + action_ = null; + } + return actionBuilder_; + } + + // optional bool atomic = 3; + private boolean atomic_ ; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getAtomic() { + return atomic_; + } + public Builder setAtomic(boolean value) { + bitField0_ |= 0x00000004; + atomic_ = value; + onChanged(); + return this; + } + public Builder clearAtomic() { + bitField0_ = (bitField0_ & ~0x00000004); + atomic_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MultiRequest) + } + + static { + defaultInstance = new MultiRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRequest) + } + + public interface MultiResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .ActionResult result = 1; + java.util.List + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index); + int getResultCount(); + java.util.List + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index); + } + public static final class MultiResponse extends + com.google.protobuf.GeneratedMessage + implements MultiResponseOrBuilder { + // Use MultiResponse.newBuilder() to construct. 
+ private MultiResponse(Builder builder) { + super(builder); + } + private MultiResponse(boolean noInit) {} + + private static final MultiResponse defaultInstance; + public static MultiResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // repeated .ActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; + } + public java.util.List + getResultOrBuilderList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + return result_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + 
output.writeMessage(1, result_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString 
data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this); + int from_bitField0_ = bitField0_; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = 
java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResult(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .ActionResult result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> resultBuilder_; + + public java.util.List getResultList() { + if (resultBuilder_ == null) { + 
return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResult( + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.getDefaultInstance()); + } + public java.util.List + getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiResponse) + } + + static { + defaultInstance = new MultiResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiResponse) + } + + public static abstract class ClientService + implements com.google.protobuf.Service { + protected ClientService() {} + + public interface Interface { + public abstract void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void 
mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new ClientService() { + @java.lang.Override + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + impl.get(controller, request, done); + } + + @java.lang.Override + public void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + 
com.google.protobuf.RpcCallback done) { + impl.mutate(controller, request, done); + } + + @java.lang.Override + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + impl.scan(controller, request, done); + } + + @java.lang.Override + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.lockRow(controller, request, done); + } + + @java.lang.Override + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.unlockRow(controller, request, done); + } + + @java.lang.Override + public void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + impl.bulkLoadHFile(controller, request, done); + } + + @java.lang.Override + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + impl.execCoprocessor(controller, request, done); + } + + @java.lang.Override + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + impl.multi(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return 
getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request); + case 1: + return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request); + case 2: + return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request); + case 3: + return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request); + case 4: + return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request); + case 5: + return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); + case 6: + return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request); + case 7: + return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: 
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + public static final + 
com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.execCoprocessor(controller, 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + 
"Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + 
done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance())); + } + + public void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance())); + } + + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance())); + } + + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance())); + } + + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance())); + } + + public void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); + } + + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance())); + } + + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException; + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Column_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_Column_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Get_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Get_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Result_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Result_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Condition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Condition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateRequest_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Scan_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Scan_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowResponse_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Exec_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Exec_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ActionResult_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ActionResult_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_MultiResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Client.proto\032\013hbase.proto\"+\n\006Column\022\016\n" + + "\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\320\001\n\003Get" + + "\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Column\022!" + + "\n\tattribute\030\003 \003(\0132\016.NameBytesPair\022\016\n\006loc" + + "kId\030\004 \001(\004\022\036\n\006filter\030\005 \001(\0132\016.NameBytesPai" + + "r\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxV" + + "ersions\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004t" + + "rue\"\037\n\006Result\022\025\n\rkeyValueBytes\030\001 \003(\014\"r\n\n" + + "GetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020closestRowBef", + "ore\030\003 \001(\010\022\025\n\rexistenceOnly\030\004 \001(\010\"6\n\013GetR" + + "esponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\016\n\006exis" + + "ts\030\002 \001(\010\"\200\002\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016\n\006f" + + "amily\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\013compar" + + "eType\030\004 \002(\0162\026.Condition.CompareType\022\"\n\nc" + + "omparator\030\005 \002(\0132\016.NameBytesPair\"r\n\013Compa" + + "reType\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005E" + + "QUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUA" + + "L\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006\"\306\004\n\006Mutate\022\013" + + "\n\003row\030\001 
\002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Mutate", + ".MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mutat" + + "e.ColumnValue\022!\n\tattribute\030\004 \003(\0132\016.NameB" + + "ytesPair\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030\006 " + + "\001(\004\022\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeRan" + + "ge\030\n \001(\0132\n.TimeRange\032\310\001\n\013ColumnValue\022\016\n\006" + + "family\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\".M" + + "utate.ColumnValue.QualifierValue\032m\n\016Qual" + + "ifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002" + + " \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022&\n\ndeleteType\030\004 " + + "\001(\0162\022.Mutate.DeleteType\"<\n\nMutateType\022\n\n", + "\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DEL" + + "ETE\020\003\"U\n\nDeleteType\022\026\n\022DELETE_ONE_VERSIO" + + "N\020\000\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDEL" + + "ETE_FAMILY\020\002\"i\n\rMutateRequest\022 \n\006region\030" + + "\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030\002 \002(\0132" + + "\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Condition\"" + + "<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132\007.Resu" + + "lt\022\021\n\tprocessed\030\002 \001(\010\"\352\001\n\004Scan\022\027\n\006column" + + "\030\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\0132\016.Nam" + + "eBytesPair\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRow\030", + "\004 \001(\014\022\036\n\006filter\030\005 \001(\0132\016.NameBytesPair\022\035\n" + + "\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersi" + + "ons\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022" + + "\021\n\tbatchSize\030\t \001(\r\"\203\001\n\013ScanRequest\022 \n\006re" + + "gion\030\001 \001(\0132\020.RegionSpecifier\022\023\n\004scan\030\002 \001" + + 
"(\0132\005.Scan\022\021\n\tscannerId\030\003 \001(\004\022\024\n\014numberOf" + + "Rows\030\004 \001(\r\022\024\n\014closeScanner\030\005 \001(\010\"\\\n\014Scan" + + "Response\022\027\n\006result\030\001 \003(\0132\007.Result\022\021\n\tsca" + + "nnerId\030\002 \001(\004\022\023\n\013moreResults\030\003 \001(\010\022\013\n\003ttl" + + "\030\004 \001(\r\"?\n\016LockRowRequest\022 \n\006region\030\001 \002(\013", + "2\020.RegionSpecifier\022\013\n\003row\030\002 \003(\014\".\n\017LockR" + + "owResponse\022\016\n\006lockId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"" + + "D\n\020UnlockRowRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\016\n\006lockId\030\002 \002(\004\"\023\n\021UnlockR" + + "owResponse\"\232\001\n\024BulkLoadHFileRequest\022 \n\006r" + + "egion\030\001 \002(\0132\020.RegionSpecifier\0224\n\nfamilyP" + + "ath\030\002 \003(\0132 .BulkLoadHFileRequest.FamilyP" + + "ath\032*\n\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014\n\004pat" + + "h\030\002 \002(\t\"\'\n\025BulkLoadHFileResponse\022\016\n\006load" + + "ed\030\001 \002(\010\"\203\001\n\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protoc", + "olName\030\002 \002(\t\022\022\n\nmethodName\030\003 \002(\t\022!\n\010prop" + + "erty\030\004 \003(\0132\017.NameStringPair\022!\n\tparameter" + + "\030\005 \003(\0132\016.NameBytesPair\"O\n\026ExecCoprocesso" + + "rRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + + "er\022\023\n\004call\030\002 \002(\0132\005.Exec\"8\n\027ExecCoprocess" + + "orResponse\022\035\n\005value\030\001 \002(\0132\016.NameBytesPai" + + "r\"P\n\014ActionResult\022\035\n\005value\030\001 \001(\0132\016.NameB" + + "ytesPair\022!\n\texception\030\002 \001(\0132\016.NameBytesP" + + "air\"`\n\014MultiRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\036\n\006action\030\002 \003(\0132\016.NameByte", + "sPair\022\016\n\006atomic\030\003 \001(\010\".\n\rMultiResponse\022\035" + + 
"\n\006result\030\001 \003(\0132\r.ActionResult2\221\003\n\rClient" + + "Service\022 \n\003get\022\013.GetRequest\032\014.GetRespons" + + "e\022)\n\006mutate\022\016.MutateRequest\032\017.MutateResp" + + "onse\022#\n\004scan\022\014.ScanRequest\032\r.ScanRespons" + + "e\022,\n\007lockRow\022\017.LockRowRequest\032\020.LockRowR" + + "esponse\0222\n\tunlockRow\022\021.UnlockRowRequest\032" + + "\022.UnlockRowResponse\022>\n\rbulkLoadHFile\022\025.B" + + "ulkLoadHFileRequest\032\026.BulkLoadHFileRespo" + + "nse\022D\n\017execCoprocessor\022\027.ExecCoprocessor", + "Request\032\030.ExecCoprocessorResponse\022&\n\005mul" + + "ti\022\r.MultiRequest\032\016.MultiResponseBB\n*org" + + ".apache.hadoop.hbase.protobuf.generatedB" + + "\014ClientProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_Column_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Column_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Column_descriptor, + new java.lang.String[] { "Family", "Qualifier", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); + internal_static_Get_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Get_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Get_descriptor, + new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); + internal_static_Result_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_Result_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Result_descriptor, + new java.lang.String[] { "KeyValueBytes", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class); + internal_static_GetRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRequest_descriptor, + new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class); + internal_static_GetResponse_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetResponse_descriptor, + new java.lang.String[] { "Result", "Exists", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class); + internal_static_Condition_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_Condition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Condition_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); + internal_static_Mutate_descriptor = + 
getDescriptor().getMessageTypes().get(6); + internal_static_Mutate_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_descriptor, + new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.Builder.class); + internal_static_Mutate_ColumnValue_descriptor = + internal_static_Mutate_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_descriptor, + new java.lang.String[] { "Family", "QualifierValue", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.Builder.class); + internal_static_Mutate_ColumnValue_QualifierValue_descriptor = + internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_QualifierValue_descriptor, + new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); + internal_static_MutateRequest_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_MutateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateRequest_descriptor, + new java.lang.String[] { "Region", "Mutate", "Condition", }, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); + internal_static_MutateResponse_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_MutateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateResponse_descriptor, + new java.lang.String[] { "Result", "Processed", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); + internal_static_Scan_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_Scan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Scan_descriptor, + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); + internal_static_ScanRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_ScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanRequest_descriptor, + new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); + internal_static_ScanResponse_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_ScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanResponse_descriptor, + new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); + internal_static_LockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_LockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowRequest_descriptor, + new java.lang.String[] { "Region", "Row", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest.Builder.class); + internal_static_LockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_LockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowResponse_descriptor, + new java.lang.String[] { "LockId", "Ttl", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse.Builder.class); + internal_static_UnlockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_UnlockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnlockRowRequest_descriptor, + new java.lang.String[] { "Region", "LockId", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest.Builder.class); + internal_static_UnlockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_UnlockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnlockRowResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse.Builder.class); + internal_static_BulkLoadHFileRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_BulkLoadHFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_descriptor, + new java.lang.String[] { "Region", "FamilyPath", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class); + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = + internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, + new java.lang.String[] { "Family", "Path", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + internal_static_BulkLoadHFileResponse_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_BulkLoadHFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileResponse_descriptor, + new java.lang.String[] { "Loaded", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + internal_static_Exec_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_Exec_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Exec_descriptor, + new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder.class); + internal_static_ExecCoprocessorRequest_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_ExecCoprocessorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorRequest_descriptor, + new java.lang.String[] { "Region", "Call", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.Builder.class); + internal_static_ExecCoprocessorResponse_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_ExecCoprocessorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorResponse_descriptor, + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.Builder.class); + internal_static_ActionResult_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_ActionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ActionResult_descriptor, + new java.lang.String[] { "Value", "Exception", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class); + internal_static_MultiRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_MultiRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRequest_descriptor, + new java.lang.String[] { "Region", "Action", "Atomic", }, + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class); + internal_static_MultiResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_MultiResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiResponse_descriptor, + new java.lang.String[] { "Result", }, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 4026da0..efcf74d 100644 --- src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -3080,6 +3080,1018 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:ServerName) } + public interface NameStringPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // required string value = 2; + boolean hasValue(); + String getValue(); + } + public static final class NameStringPair extends + com.google.protobuf.GeneratedMessage + implements NameStringPairOrBuilder { + // Use NameStringPair.newBuilder() to construct. 
+ private NameStringPair(Builder builder) { + super(builder); + } + private NameStringPair(boolean noInit) {} + + private static final NameStringPair defaultInstance; + public static NameStringPair getDefaultInstance() { + return defaultInstance; + } + + public NameStringPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private java.lang.Object value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + 
(com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + value_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + name_ = ""; + value_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getValueBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getValueBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return 
super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return 
this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if 
(value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // required string value = 2; + private java.lang.Object value_ = ""; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + value_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setValue(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + void setValue(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:NameStringPair) + } + + static { + defaultInstance = new NameStringPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameStringPair) + } + + public interface NameBytesPairOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class NameBytesPair extends + com.google.protobuf.GeneratedMessage + implements NameBytesPairOrBuilder { + // Use NameBytesPair.newBuilder() to construct. 
+ private NameBytesPair(Builder builder) { + super(builder); + } + private NameBytesPair(boolean noInit) {} + + private static final NameBytesPair defaultInstance; + public static NameBytesPair getDefaultInstance() { + return defaultInstance; + } + + public NameBytesPair getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + name_ = ""; + value_ = 
com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + 
result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + 
if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString 
getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:NameBytesPair) + } + + static { + defaultInstance = new NameBytesPair(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NameBytesPair) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_RegionInfo_descriptor; private static @@ -3105,6 +4117,16 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ServerName_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameStringPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameStringPair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NameBytesPair_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NameBytesPair_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -3126,11 +4148,14 @@ public final class HBaseProtos { "(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004", "\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030\006 \001" + "(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n\004po" + - "rt\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004*_\n\007KeyType\022\013" + - "\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELE" + - 
"TE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMU" + - "M\020\377\001B>\n*org.apache.hadoop.hbase.protobuf" + - ".generatedB\013HBaseProtosH\001\240\001\001" + "rt\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004\"-\n\016NameStrin" + + "gPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNa" + + "meBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014" + + "*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DEL" + + "ETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAMIL" + + "Y\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoop.h" + + "base.protobuf.generatedB\013HBaseProtosH\001\240\001" + + "\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -3177,6 +4202,22 @@ public final class HBaseProtos { new java.lang.String[] { "HostName", "Port", "StartCode", }, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + internal_static_NameStringPair_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_NameStringPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameStringPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); + internal_static_NameBytesPair_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_NameBytesPair_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NameBytesPair_descriptor, + new java.lang.String[] { "Name", "Value", }, + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); return null; } }; diff --git src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java deleted file mode 100644 index 2169310..0000000 --- src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java +++ /dev/null @@ -1,15454 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: RegionAdmin.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class RegionAdminProtos { - private RegionAdminProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface GetRegionInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - } - public static final class GetRegionInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoRequestOrBuilder { - // Use GetRegionInfoRequest.newBuilder() to construct. 
- private GetRegionInfoRequest(Builder builder) { - super(builder); - } - private GetRegionInfoRequest(boolean noInit) {} - - private static final GetRegionInfoRequest defaultInstance; - public static GetRegionInfoRequest getDefaultInstance() { - return defaultInstance; - } - - public GetRegionInfoRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - 
getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom( - java.io.InputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest 
result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) - } - - static { - defaultInstance = new GetRegionInfoRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) - } - - public interface GetRegionInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionInfo regionInfo = 1; - boolean hasRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - } - public static final class GetRegionInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetRegionInfoResponseOrBuilder { - // Use GetRegionInfoResponse.newBuilder() to construct. 
- private GetRegionInfoResponse(Builder builder) { - super(builder); - } - private GetRegionInfoResponse(boolean noInit) {} - - private static final GetRegionInfoResponse defaultInstance; - public static GetRegionInfoResponse getDefaultInstance() { - return defaultInstance; - } - - public GetRegionInfoResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionInfo regionInfo = 1; - public static final int REGIONINFO_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; - } - - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionInfo()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegionInfo().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - 
throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) obj; - - boolean result = true; - result = result && (hasRegionInfo() == other.hasRegionInfo()); - if (hasRegionInfo()) { - result = result && getRegionInfo() - .equals(other.getRegionInfo()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionInfo()) { - hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; - hash = (53 * hash) + getRegionInfo().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.ByteString data) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse 
parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionInfoBuilder_ == null) { - result.regionInfo_ = regionInfo_; - } else { - result.regionInfo_ = regionInfoBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; - if (other.hasRegionInfo()) { - mergeRegionInfo(other.getRegionInfo()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionInfo()) { - - return false; - } - if (!getRegionInfo().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - if (hasRegionInfo()) { - subBuilder.mergeFrom(getRegionInfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegionInfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionInfo regionInfo = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; - public boolean hasRegionInfo() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - if (regionInfoBuilder_ == null) { - return regionInfo_; - } else { - return regionInfoBuilder_.getMessage(); - } - } - public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - regionInfo_ = value; - onChanged(); - } else { - 
regionInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegionInfo( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - regionInfo_ = builderForValue.build(); - onChanged(); - } else { - regionInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { - regionInfo_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); - } else { - regionInfo_ = value; - } - onChanged(); - } else { - regionInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegionInfo() { - if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - onChanged(); - } else { - regionInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionInfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - if (regionInfoBuilder_ != null) { - return regionInfoBuilder_.getMessageOrBuilder(); - } else { - return regionInfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - getRegionInfoFieldBuilder() { - if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, - getParentForChildren(), - isClean()); - regionInfo_ = null; - } - return regionInfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) - } - - static { - defaultInstance = new GetRegionInfoResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) - } - - public interface GetStoreFileListRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated bytes columnFamily = 2; - java.util.List getColumnFamilyList(); - int getColumnFamilyCount(); - com.google.protobuf.ByteString getColumnFamily(int index); - } - public static final class GetStoreFileListRequest extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileListRequestOrBuilder { - // Use GetStoreFileListRequest.newBuilder() to construct. 
- private GetStoreFileListRequest(Builder builder) { - super(builder); - } - private GetStoreFileListRequest(boolean noInit) {} - - private static final GetStoreFileListRequest defaultInstance; - public static GetStoreFileListRequest getDefaultInstance() { - return defaultInstance; - } - - public GetStoreFileListRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated bytes columnFamily = 2; - public static final int COLUMNFAMILY_FIELD_NUMBER = 2; - private java.util.List columnFamily_; - public java.util.List - getColumnFamilyList() { - return columnFamily_; - } - public int getColumnFamilyCount() { - return columnFamily_.size(); - } - public com.google.protobuf.ByteString getColumnFamily(int index) { - return columnFamily_.get(index); - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - columnFamily_ = java.util.Collections.emptyList();; - } - private byte 
memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < columnFamily_.size(); i++) { - output.writeBytes(2, columnFamily_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - { - int dataSize = 0; - for (int i = 0; i < columnFamily_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(columnFamily_.get(i)); - } - size += dataSize; - size += 1 * getColumnFamilyList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getColumnFamilyList() - .equals(other.getColumnFamilyList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getColumnFamilyCount() > 0) { - hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; - hash = (53 * hash) + getColumnFamilyList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static 
Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - columnFamily_ = 
java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) 
{ - columnFamily_ = java.util.Collections.unmodifiableList(columnFamily_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.columnFamily_ = columnFamily_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (!other.columnFamily_.isEmpty()) { - if (columnFamily_.isEmpty()) { - columnFamily_ = other.columnFamily_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureColumnFamilyIsMutable(); - columnFamily_.addAll(other.columnFamily_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - ensureColumnFamilyIsMutable(); - columnFamily_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - 
regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated bytes columnFamily = 2; - private java.util.List columnFamily_ = java.util.Collections.emptyList();; - private void ensureColumnFamilyIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - columnFamily_ = new java.util.ArrayList(columnFamily_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getColumnFamilyList() { - return java.util.Collections.unmodifiableList(columnFamily_); - } - public int getColumnFamilyCount() { - return columnFamily_.size(); - } - public com.google.protobuf.ByteString getColumnFamily(int index) { - return columnFamily_.get(index); - } - public Builder setColumnFamily( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnFamilyIsMutable(); - columnFamily_.set(index, value); - onChanged(); - return this; - } - public Builder addColumnFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnFamilyIsMutable(); - columnFamily_.add(value); - onChanged(); - return this; - } - public Builder addAllColumnFamily( - java.lang.Iterable values) { - ensureColumnFamilyIsMutable(); - super.addAll(values, columnFamily_); - onChanged(); - return this; - } - public Builder clearColumnFamily() { - columnFamily_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetStoreFileListRequest) - } - - static { - defaultInstance = new GetStoreFileListRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetStoreFileListRequest) - } - - public interface GetStoreFileListResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated string 
storeFile = 1; - java.util.List getStoreFileList(); - int getStoreFileCount(); - String getStoreFile(int index); - } - public static final class GetStoreFileListResponse extends - com.google.protobuf.GeneratedMessage - implements GetStoreFileListResponseOrBuilder { - // Use GetStoreFileListResponse.newBuilder() to construct. - private GetStoreFileListResponse(Builder builder) { - super(builder); - } - private GetStoreFileListResponse(boolean noInit) {} - - private static final GetStoreFileListResponse defaultInstance; - public static GetStoreFileListResponse getDefaultInstance() { - return defaultInstance; - } - - public GetStoreFileListResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; - } - - // repeated string storeFile = 1; - public static final int STOREFILE_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList storeFile_; - public java.util.List - getStoreFileList() { - return storeFile_; - } - public int getStoreFileCount() { - return storeFile_.size(); - } - public String getStoreFile(int index) { - return storeFile_.get(index); - } - - private void initFields() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for 
(int i = 0; i < storeFile_.size(); i++) { - output.writeBytes(1, storeFile_.getByteString(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < storeFile_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(storeFile_.getByteString(i)); - } - size += dataSize; - size += 1 * getStoreFileList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) obj; - - boolean result = true; - result = result && getStoreFileList() - .equals(other.getStoreFileList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getStoreFileCount() > 0) { - hash = (37 * hash) + STOREFILE_FIELD_NUMBER; - hash = (53 * hash) + getStoreFileList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = 
buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( - storeFile_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.storeFile_ = storeFile_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()) return this; - if (!other.storeFile_.isEmpty()) { - if (storeFile_.isEmpty()) { - storeFile_ = other.storeFile_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureStoreFileIsMutable(); - storeFile_.addAll(other.storeFile_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - 
com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureStoreFileIsMutable(); - storeFile_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated string storeFile = 1; - private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureStoreFileIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getStoreFileList() { - return java.util.Collections.unmodifiableList(storeFile_); - } - public int getStoreFileCount() { - return storeFile_.size(); - } - public String getStoreFile(int index) { - return storeFile_.get(index); - } - public Builder setStoreFile( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.set(index, value); - onChanged(); - return this; - } - public Builder addStoreFile(String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureStoreFileIsMutable(); - storeFile_.add(value); - onChanged(); - return this; - } - public Builder addAllStoreFile( - java.lang.Iterable values) { - ensureStoreFileIsMutable(); - super.addAll(values, storeFile_); - onChanged(); - return this; - } - public Builder clearStoreFile() { - storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - void addStoreFile(com.google.protobuf.ByteString value) { - ensureStoreFileIsMutable(); - 
storeFile_.add(value); - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:GetStoreFileListResponse) - } - - static { - defaultInstance = new GetStoreFileListResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetStoreFileListResponse) - } - - public interface GetOnlineRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GetOnlineRegionRequest extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionRequestOrBuilder { - // Use GetOnlineRegionRequest.newBuilder() to construct. - private GetOnlineRegionRequest(Builder builder) { - super(builder); - } - private GetOnlineRegionRequest(boolean noInit) {} - - private static final GetOnlineRegionRequest defaultInstance; - public static GetOnlineRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public GetOnlineRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size 
!= -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - 
- public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other) { - 
if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest) - } - - static { - defaultInstance = new GetOnlineRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest) - } - - public interface GetOnlineRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .RegionInfo regionInfo = 1; - java.util.List - getRegionInfoList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index); - int getRegionInfoCount(); - java.util.List - getRegionInfoOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( - int index); - } - public static final class GetOnlineRegionResponse extends - com.google.protobuf.GeneratedMessage - implements GetOnlineRegionResponseOrBuilder { - // Use GetOnlineRegionResponse.newBuilder() to construct. 
- private GetOnlineRegionResponse(Builder builder) { - super(builder); - } - private GetOnlineRegionResponse(boolean noInit) {} - - private static final GetOnlineRegionResponse defaultInstance; - public static GetOnlineRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public GetOnlineRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; - } - - // repeated .RegionInfo regionInfo = 1; - public static final int REGIONINFO_FIELD_NUMBER = 1; - private java.util.List regionInfo_; - public java.util.List getRegionInfoList() { - return regionInfo_; - } - public java.util.List - getRegionInfoOrBuilderList() { - return regionInfo_; - } - public int getRegionInfoCount() { - return regionInfo_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { - return regionInfo_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( - int index) { - return regionInfo_.get(index); - } - - private void initFields() { - regionInfo_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getRegionInfoCount(); i++) { - if (!getRegionInfo(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void 
writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < regionInfo_.size(); i++) { - output.writeMessage(1, regionInfo_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < regionInfo_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) obj; - - boolean result = true; - result = result && getRegionInfoList() - .equals(other.getRegionInfoList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRegionInfoCount() > 0) { - hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; - hash = (53 * hash) + getRegionInfoList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.ByteString data) 
- throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionInfoFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionInfoBuilder_ == null) { - regionInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - regionInfoBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse(this); - int from_bitField0_ = bitField0_; - if (regionInfoBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.regionInfo_ = regionInfo_; - } else { - result.regionInfo_ = regionInfoBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; - if (regionInfoBuilder_ == null) { - if (!other.regionInfo_.isEmpty()) { - if (regionInfo_.isEmpty()) { - regionInfo_ = other.regionInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRegionInfoIsMutable(); - regionInfo_.addAll(other.regionInfo_); - } - onChanged(); - } - } else { - if (!other.regionInfo_.isEmpty()) { - if (regionInfoBuilder_.isEmpty()) { - regionInfoBuilder_.dispose(); - regionInfoBuilder_ = null; - regionInfo_ = 
other.regionInfo_; - bitField0_ = (bitField0_ & ~0x00000001); - regionInfoBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getRegionInfoFieldBuilder() : null; - } else { - regionInfoBuilder_.addAllMessages(other.regionInfo_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getRegionInfoCount(); i++) { - if (!getRegionInfo(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegionInfo(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .RegionInfo regionInfo = 1; - private java.util.List regionInfo_ = - java.util.Collections.emptyList(); - private void ensureRegionInfoIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - regionInfo_ = new java.util.ArrayList(regionInfo_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; - - public java.util.List getRegionInfoList() { - if (regionInfoBuilder_ == null) { - return java.util.Collections.unmodifiableList(regionInfo_); - } else { - return regionInfoBuilder_.getMessageList(); - } - } - public int getRegionInfoCount() { - if (regionInfoBuilder_ == null) { - return regionInfo_.size(); - } else { - return regionInfoBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { - if (regionInfoBuilder_ == null) { - return regionInfo_.get(index); - } else { - return regionInfoBuilder_.getMessage(index); - } - } - public Builder setRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - regionInfo_.set(index, value); - onChanged(); - } else { - regionInfoBuilder_.setMessage(index, value); - } - return this; - } - public Builder setRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.set(index, builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { - if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - regionInfo_.add(value); - onChanged(); - } else { - regionInfoBuilder_.addMessage(value); - } - return this; - } - public Builder addRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 
- if (regionInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionInfoIsMutable(); - regionInfo_.add(index, value); - onChanged(); - } else { - regionInfoBuilder_.addMessage(index, value); - } - return this; - } - public Builder addRegionInfo( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.add(builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addRegionInfo( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.add(index, builderForValue.build()); - onChanged(); - } else { - regionInfoBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllRegionInfo( - java.lang.Iterable values) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - super.addAll(values, regionInfo_); - onChanged(); - } else { - regionInfoBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearRegionInfo() { - if (regionInfoBuilder_ == null) { - regionInfo_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - regionInfoBuilder_.clear(); - } - return this; - } - public Builder removeRegionInfo(int index) { - if (regionInfoBuilder_ == null) { - ensureRegionInfoIsMutable(); - regionInfo_.remove(index); - onChanged(); - } else { - regionInfoBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( - int index) { - return getRegionInfoFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( 
- int index) { - if (regionInfoBuilder_ == null) { - return regionInfo_.get(index); } else { - return regionInfoBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getRegionInfoOrBuilderList() { - if (regionInfoBuilder_ != null) { - return regionInfoBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(regionInfo_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { - return getRegionInfoFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( - int index) { - return getRegionInfoFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); - } - public java.util.List - getRegionInfoBuilderList() { - return getRegionInfoFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> - getRegionInfoFieldBuilder() { - if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - regionInfo_ = null; - } - return regionInfoBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) - } - - static { - defaultInstance = new GetOnlineRegionResponse(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:GetOnlineRegionResponse) - } - - public interface OpenRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .RegionSpecifier region = 1; - java.util.List - getRegionList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index); - int getRegionCount(); - java.util.List - getRegionOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( - int index); - - // optional uint32 versionOfOfflineNode = 2; - boolean hasVersionOfOfflineNode(); - int getVersionOfOfflineNode(); - } - public static final class OpenRegionRequest extends - com.google.protobuf.GeneratedMessage - implements OpenRegionRequestOrBuilder { - // Use OpenRegionRequest.newBuilder() to construct. - private OpenRegionRequest(Builder builder) { - super(builder); - } - private OpenRegionRequest(boolean noInit) {} - - private static final OpenRegionRequest defaultInstance; - public static OpenRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public OpenRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // repeated .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private java.util.List region_; - public java.util.List getRegionList() { - return region_; - } - public java.util.List - getRegionOrBuilderList() { - return region_; - } - public int getRegionCount() { - return region_.size(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { - return region_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( - int index) { - return region_.get(index); - } - - // optional uint32 versionOfOfflineNode = 2; - public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2; - private int versionOfOfflineNode_; - public boolean hasVersionOfOfflineNode() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getVersionOfOfflineNode() { - return versionOfOfflineNode_; - } - - private void initFields() { - region_ = java.util.Collections.emptyList(); - versionOfOfflineNode_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getRegionCount(); i++) { - if (!getRegion(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < region_.size(); i++) { - output.writeMessage(1, region_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(2, versionOfOfflineNode_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < region_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, versionOfOfflineNode_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - 
return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) obj; - - boolean result = true; - result = result && getRegionList() - .equals(other.getRegionList()); - result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); - if (hasVersionOfOfflineNode()) { - result = result && (getVersionOfOfflineNode() - == other.getVersionOfOfflineNode()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRegionCount() > 0) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegionList().hashCode(); - } - if (hasVersionOfOfflineNode()) { - hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER; - hash = (53 * hash) + getVersionOfOfflineNode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - 
maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - regionBuilder_.clear(); - } - versionOfOfflineNode_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = new 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - region_ = java.util.Collections.unmodifiableList(region_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.versionOfOfflineNode_ = versionOfOfflineNode_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance()) return this; - if (regionBuilder_ == null) { - if (!other.region_.isEmpty()) { - if (region_.isEmpty()) { - region_ = other.region_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRegionIsMutable(); - region_.addAll(other.region_); - } - onChanged(); - } - } else { - if (!other.region_.isEmpty()) { - if (regionBuilder_.isEmpty()) { - regionBuilder_.dispose(); - regionBuilder_ = null; - region_ = other.region_; - bitField0_ = (bitField0_ & ~0x00000001); - regionBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getRegionFieldBuilder() : null; - } else { - regionBuilder_.addAllMessages(other.region_); - } - } - } - if (other.hasVersionOfOfflineNode()) { - setVersionOfOfflineNode(other.getVersionOfOfflineNode()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getRegionCount(); i++) { - if (!getRegion(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfOfflineNode_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .RegionSpecifier region = 1; - private java.util.List region_ = - java.util.Collections.emptyList(); - private void ensureRegionIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - region_ = new java.util.ArrayList(region_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - - public java.util.List getRegionList() { - if (regionBuilder_ == null) { - return java.util.Collections.unmodifiableList(region_); - } else { - return regionBuilder_.getMessageList(); - } - } - public int getRegionCount() { - if (regionBuilder_ == null) { - return region_.size(); - } else { - return regionBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { - if (regionBuilder_ == null) { - return region_.get(index); - } else { - return regionBuilder_.getMessage(index); - } - } - public Builder setRegion( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionIsMutable(); - region_.set(index, value); - onChanged(); - } else { - regionBuilder_.setMessage(index, value); - } - return this; - } - public Builder setRegion( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - ensureRegionIsMutable(); - region_.set(index, builderForValue.build()); - onChanged(); - } else { - regionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionIsMutable(); - region_.add(value); - onChanged(); - } else { - regionBuilder_.addMessage(value); - } - return this; - } - public Builder addRegion( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new 
NullPointerException(); - } - ensureRegionIsMutable(); - region_.add(index, value); - onChanged(); - } else { - regionBuilder_.addMessage(index, value); - } - return this; - } - public Builder addRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - ensureRegionIsMutable(); - region_.add(builderForValue.build()); - onChanged(); - } else { - regionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addRegion( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - ensureRegionIsMutable(); - region_.add(index, builderForValue.build()); - onChanged(); - } else { - regionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllRegion( - java.lang.Iterable values) { - if (regionBuilder_ == null) { - ensureRegionIsMutable(); - super.addAll(values, region_); - onChanged(); - } else { - regionBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - regionBuilder_.clear(); - } - return this; - } - public Builder removeRegion(int index) { - if (regionBuilder_ == null) { - ensureRegionIsMutable(); - region_.remove(index); - onChanged(); - } else { - regionBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder( - int index) { - return getRegionFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( - int index) { - if (regionBuilder_ == null) { - return region_.get(index); } else { - return regionBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - 
getRegionOrBuilderList() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(region_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder() { - return getRegionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder( - int index) { - return getRegionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); - } - public java.util.List - getRegionBuilderList() { - return getRegionFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint32 versionOfOfflineNode = 2; - private int versionOfOfflineNode_ ; - public boolean hasVersionOfOfflineNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfOfflineNode() { - return versionOfOfflineNode_; - } - public Builder setVersionOfOfflineNode(int value) { - bitField0_ |= 0x00000002; - versionOfOfflineNode_ = value; - 
onChanged(); - return this; - } - public Builder clearVersionOfOfflineNode() { - bitField0_ = (bitField0_ & ~0x00000002); - versionOfOfflineNode_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:OpenRegionRequest) - } - - static { - defaultInstance = new OpenRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OpenRegionRequest) - } - - public interface OpenRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - java.util.List getOpeningStateList(); - int getOpeningStateCount(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); - } - public static final class OpenRegionResponse extends - com.google.protobuf.GeneratedMessage - implements OpenRegionResponseOrBuilder { - // Use OpenRegionResponse.newBuilder() to construct. - private OpenRegionResponse(Builder builder) { - super(builder); - } - private OpenRegionResponse(boolean noInit) {} - - private static final OpenRegionResponse defaultInstance; - public static OpenRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public OpenRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; - } - - public enum RegionOpeningState - implements com.google.protobuf.ProtocolMessageEnum { - OPENED(0, 0), - ALREADY_OPENED(1, 1), - FAILED_OPENING(2, 2), - ; - - public static final int OPENED_VALUE = 0; - public 
static final int ALREADY_OPENED_VALUE = 1; - public static final int FAILED_OPENING_VALUE = 2; - - - public final int getNumber() { return value; } - - public static RegionOpeningState valueOf(int value) { - switch (value) { - case 0: return OPENED; - case 1: return ALREADY_OPENED; - case 2: return FAILED_OPENING; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public RegionOpeningState findValueByNumber(int number) { - return RegionOpeningState.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); - } - - private static final RegionOpeningState[] VALUES = { - OPENED, ALREADY_OPENED, FAILED_OPENING, - }; - - public static RegionOpeningState valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private RegionOpeningState(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) - } - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - public static final int OPENINGSTATE_FIELD_NUMBER = 1; - private java.util.List 
openingState_; - public java.util.List getOpeningStateList() { - return openingState_; - } - public int getOpeningStateCount() { - return openingState_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); - } - - private void initFields() { - openingState_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < openingState_.size(); i++) { - output.writeEnum(1, openingState_.get(i).getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < openingState_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeEnumSizeNoTag(openingState_.get(i).getNumber()); - } - size += dataSize; - size += 1 * openingState_.size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) obj; - - boolean result = true; - result = result && getOpeningStateList() - .equals(other.getOpeningStateList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getOpeningStateCount() > 0) { - hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnumList(getOpeningStateList()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return 
newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - openingState_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = java.util.Collections.unmodifiableList(openingState_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.openingState_ = openingState_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()) return this; - if (!other.openingState_.isEmpty()) { - if (openingState_.isEmpty()) { - openingState_ = other.openingState_; - 
bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureOpeningStateIsMutable(); - openingState_.addAll(other.openingState_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - break; - } - case 10: { - int length = input.readRawVarint32(); - int oldLimit = input.pushLimit(length); - while(input.getBytesUntilLimit() > 0) { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - addOpeningState(value); - } - } - input.popLimit(oldLimit); - break; - } - } - } - } - - private int bitField0_; - - // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; - 
private java.util.List openingState_ = - java.util.Collections.emptyList(); - private void ensureOpeningStateIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - openingState_ = new java.util.ArrayList(openingState_); - bitField0_ |= 0x00000001; - } - } - public java.util.List getOpeningStateList() { - return java.util.Collections.unmodifiableList(openingState_); - } - public int getOpeningStateCount() { - return openingState_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { - return openingState_.get(index); - } - public Builder setOpeningState( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpeningStateIsMutable(); - openingState_.set(index, value); - onChanged(); - return this; - } - public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) { - if (value == null) { - throw new NullPointerException(); - } - ensureOpeningStateIsMutable(); - openingState_.add(value); - onChanged(); - return this; - } - public Builder addAllOpeningState( - java.lang.Iterable values) { - ensureOpeningStateIsMutable(); - super.addAll(values, openingState_); - onChanged(); - return this; - } - public Builder clearOpeningState() { - openingState_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:OpenRegionResponse) - } - - static { - defaultInstance = new OpenRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:OpenRegionResponse) - } - - public interface CloseRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); 
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional uint32 versionOfClosingNode = 2; - boolean hasVersionOfClosingNode(); - int getVersionOfClosingNode(); - - // optional bool transitionInZK = 3 [default = true]; - boolean hasTransitionInZK(); - boolean getTransitionInZK(); - } - public static final class CloseRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CloseRegionRequestOrBuilder { - // Use CloseRegionRequest.newBuilder() to construct. - private CloseRegionRequest(Builder builder) { - super(builder); - } - private CloseRegionRequest(boolean noInit) {} - - private static final CloseRegionRequest defaultInstance; - public static CloseRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public CloseRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional uint32 
versionOfClosingNode = 2; - public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; - private int versionOfClosingNode_; - public boolean hasVersionOfClosingNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfClosingNode() { - return versionOfClosingNode_; - } - - // optional bool transitionInZK = 3 [default = true]; - public static final int TRANSITIONINZK_FIELD_NUMBER = 3; - private boolean transitionInZK_; - public boolean hasTransitionInZK() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getTransitionInZK() { - return transitionInZK_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - versionOfClosingNode_ = 0; - transitionInZK_ = true; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, versionOfClosingNode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBool(3, transitionInZK_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 
0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, versionOfClosingNode_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, transitionInZK_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); - if (hasVersionOfClosingNode()) { - result = result && (getVersionOfClosingNode() - == other.getVersionOfClosingNode()); - } - result = result && (hasTransitionInZK() == other.hasTransitionInZK()); - if (hasTransitionInZK()) { - result = result && (getTransitionInZK() - == other.getTransitionInZK()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasVersionOfClosingNode()) { - hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER; - hash = (53 * 
hash) + getVersionOfClosingNode(); - } - if (hasTransitionInZK()) { - hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getTransitionInZK()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - versionOfClosingNode_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - transitionInZK_ = true; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.versionOfClosingNode_ = versionOfClosingNode_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.transitionInZK_ = transitionInZK_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasVersionOfClosingNode()) { - setVersionOfClosingNode(other.getVersionOfClosingNode()); - } - if (other.hasTransitionInZK()) { - setTransitionInZK(other.getTransitionInZK()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionOfClosingNode_ = input.readUInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - transitionInZK_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region 
= 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 
0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint32 versionOfClosingNode = 2; - private int versionOfClosingNode_ ; - public boolean hasVersionOfClosingNode() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getVersionOfClosingNode() { - return versionOfClosingNode_; - } - public Builder setVersionOfClosingNode(int value) { - bitField0_ |= 0x00000002; - versionOfClosingNode_ = value; - onChanged(); - return this; - } - public Builder clearVersionOfClosingNode() { - bitField0_ = (bitField0_ & ~0x00000002); 
- versionOfClosingNode_ = 0; - onChanged(); - return this; - } - - // optional bool transitionInZK = 3 [default = true]; - private boolean transitionInZK_ = true; - public boolean hasTransitionInZK() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getTransitionInZK() { - return transitionInZK_; - } - public Builder setTransitionInZK(boolean value) { - bitField0_ |= 0x00000004; - transitionInZK_ = value; - onChanged(); - return this; - } - public Builder clearTransitionInZK() { - bitField0_ = (bitField0_ & ~0x00000004); - transitionInZK_ = true; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CloseRegionRequest) - } - - static { - defaultInstance = new CloseRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CloseRegionRequest) - } - - public interface CloseRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool closed = 1; - boolean hasClosed(); - boolean getClosed(); - } - public static final class CloseRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CloseRegionResponseOrBuilder { - // Use CloseRegionResponse.newBuilder() to construct. 
- private CloseRegionResponse(Builder builder) { - super(builder); - } - private CloseRegionResponse(boolean noInit) {} - - private static final CloseRegionResponse defaultInstance; - public static CloseRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CloseRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool closed = 1; - public static final int CLOSED_FIELD_NUMBER = 1; - private boolean closed_; - public boolean hasClosed() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getClosed() { - return closed_; - } - - private void initFields() { - closed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasClosed()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, closed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, closed_); - } - 
size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) obj; - - boolean result = true; - result = result && (hasClosed() == other.hasClosed()); - if (hasClosed()) { - result = result && (getClosed() - == other.getClosed()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasClosed()) { - hash = (37 * hash) + CLOSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClosed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static 
Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - closed_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.closed_ = closed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()) return this; - if (other.hasClosed()) { - setClosed(other.getClosed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasClosed()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - closed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool closed = 1; - private boolean closed_ ; - public boolean hasClosed() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getClosed() { - return closed_; - } - public Builder setClosed(boolean value) { - bitField0_ |= 0x00000001; - closed_ = value; - onChanged(); - return this; - } - public Builder clearClosed() { - bitField0_ = (bitField0_ & ~0x00000001); - closed_ = 
false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CloseRegionResponse) - } - - static { - defaultInstance = new CloseRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CloseRegionResponse) - } - - public interface FlushRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional uint64 ifOlderThanTs = 2; - boolean hasIfOlderThanTs(); - long getIfOlderThanTs(); - } - public static final class FlushRegionRequest extends - com.google.protobuf.GeneratedMessage - implements FlushRegionRequestOrBuilder { - // Use FlushRegionRequest.newBuilder() to construct. - private FlushRegionRequest(Builder builder) { - super(builder); - } - private FlushRegionRequest(boolean noInit) {} - - private static final FlushRegionRequest defaultInstance; - public static FlushRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public FlushRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 
0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional uint64 ifOlderThanTs = 2; - public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; - private long ifOlderThanTs_; - public boolean hasIfOlderThanTs() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getIfOlderThanTs() { - return ifOlderThanTs_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - ifOlderThanTs_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, ifOlderThanTs_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, ifOlderThanTs_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = 
size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); - if (hasIfOlderThanTs()) { - result = result && (getIfOlderThanTs() - == other.getIfOlderThanTs()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasIfOlderThanTs()) { - hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getIfOlderThanTs()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) 
{ - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - ifOlderThanTs_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildPartial() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.ifOlderThanTs_ = ifOlderThanTs_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasIfOlderThanTs()) { - setIfOlderThanTs(other.getIfOlderThanTs()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) 
{ - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - ifOlderThanTs_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional uint64 ifOlderThanTs = 2; - private long ifOlderThanTs_ ; - public boolean hasIfOlderThanTs() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getIfOlderThanTs() { - return ifOlderThanTs_; - } - public Builder setIfOlderThanTs(long value) { - bitField0_ |= 0x00000002; - ifOlderThanTs_ = value; - onChanged(); - return this; - } - public Builder clearIfOlderThanTs() { - bitField0_ = (bitField0_ & ~0x00000002); - ifOlderThanTs_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FlushRegionRequest) - } - - static { - defaultInstance = new FlushRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FlushRegionRequest) - } - - public interface FlushRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 lastFlushTime = 1; - boolean hasLastFlushTime(); - long getLastFlushTime(); - - // optional bool flushed = 2; - boolean hasFlushed(); - boolean getFlushed(); - } - public static final class FlushRegionResponse extends - com.google.protobuf.GeneratedMessage - implements FlushRegionResponseOrBuilder { - // Use FlushRegionResponse.newBuilder() to construct. 
- private FlushRegionResponse(Builder builder) { - super(builder); - } - private FlushRegionResponse(boolean noInit) {} - - private static final FlushRegionResponse defaultInstance; - public static FlushRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public FlushRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 lastFlushTime = 1; - public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; - private long lastFlushTime_; - public boolean hasLastFlushTime() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushTime() { - return lastFlushTime_; - } - - // optional bool flushed = 2; - public static final int FLUSHED_FIELD_NUMBER = 2; - private boolean flushed_; - public boolean hasFlushed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getFlushed() { - return flushed_; - } - - private void initFields() { - lastFlushTime_ = 0L; - flushed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLastFlushTime()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, lastFlushTime_); - } - if 
(((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, flushed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, lastFlushTime_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, flushed_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) obj; - - boolean result = true; - result = result && (hasLastFlushTime() == other.hasLastFlushTime()); - if (hasLastFlushTime()) { - result = result && (getLastFlushTime() - == other.getLastFlushTime()); - } - result = result && (hasFlushed() == other.hasFlushed()); - if (hasFlushed()) { - result = result && (getFlushed() - == other.getFlushed()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLastFlushTime()) { - hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER; - hash = (53 * hash) + 
hashLong(getLastFlushTime()); - } - if (hasFlushed()) { - hash = (37 * hash) + FLUSHED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getFlushed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lastFlushTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - flushed_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lastFlushTime_ = lastFlushTime_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.flushed_ = flushed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()) return this; - if (other.hasLastFlushTime()) { - setLastFlushTime(other.getLastFlushTime()); - } - if (other.hasFlushed()) { - setFlushed(other.getFlushed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final 
boolean isInitialized() { - if (!hasLastFlushTime()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lastFlushTime_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - flushed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 lastFlushTime = 1; - private long lastFlushTime_ ; - public boolean hasLastFlushTime() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLastFlushTime() { - return lastFlushTime_; - } - public Builder setLastFlushTime(long value) { - bitField0_ |= 0x00000001; - lastFlushTime_ = value; - onChanged(); - return this; - } - public Builder clearLastFlushTime() { - bitField0_ = (bitField0_ & ~0x00000001); - lastFlushTime_ = 0L; - onChanged(); - return this; - } - - // optional bool flushed = 2; - private boolean flushed_ ; - public boolean hasFlushed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getFlushed() { - return flushed_; - } - public Builder setFlushed(boolean value) { - bitField0_ |= 0x00000002; - flushed_ = value; - onChanged(); - return this; - } - public Builder clearFlushed() { - bitField0_ = (bitField0_ & ~0x00000002); - flushed_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FlushRegionResponse) - 
} - - static { - defaultInstance = new FlushRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FlushRegionResponse) - } - - public interface SplitRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bytes splitPoint = 2; - boolean hasSplitPoint(); - com.google.protobuf.ByteString getSplitPoint(); - } - public static final class SplitRegionRequest extends - com.google.protobuf.GeneratedMessage - implements SplitRegionRequestOrBuilder { - // Use SplitRegionRequest.newBuilder() to construct. - private SplitRegionRequest(Builder builder) { - super(builder); - } - private SplitRegionRequest(boolean noInit) {} - - private static final SplitRegionRequest defaultInstance; - public static SplitRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public SplitRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bytes splitPoint = 2; - public static final int SPLITPOINT_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString splitPoint_; - public boolean hasSplitPoint() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSplitPoint() { - return splitPoint_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - splitPoint_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, splitPoint_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, splitPoint_); - } - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasSplitPoint() == other.hasSplitPoint()); - if (hasSplitPoint()) { - result = result && getSplitPoint() - .equals(other.getSplitPoint()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasSplitPoint()) { - hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER; - hash = (53 * hash) + getSplitPoint().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) 
{ - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - splitPoint_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest 
buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.splitPoint_ = splitPoint_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasSplitPoint()) { - setSplitPoint(other.getSplitPoint()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch 
(tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - bitField0_ |= 0x00000002; - splitPoint_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bytes splitPoint = 2; - private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasSplitPoint() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getSplitPoint() { - return splitPoint_; - } - public Builder setSplitPoint(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - splitPoint_ = value; - onChanged(); - return this; - } - public Builder clearSplitPoint() { - bitField0_ = (bitField0_ & ~0x00000002); - splitPoint_ = getDefaultInstance().getSplitPoint(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SplitRegionRequest) - } - - static { - defaultInstance = new SplitRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SplitRegionRequest) - } - - public interface SplitRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class SplitRegionResponse extends - com.google.protobuf.GeneratedMessage - implements SplitRegionResponseOrBuilder { - // Use SplitRegionResponse.newBuilder() to construct. 
- private SplitRegionResponse(Builder builder) { - super(builder); - } - private SplitRegionResponse(boolean noInit) {} - - private static final SplitRegionResponse defaultInstance; - public static SplitRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public SplitRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse build() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); 
- while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:SplitRegionResponse) - } - - static { - defaultInstance = new SplitRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SplitRegionResponse) - } - - public interface CompactRegionRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // optional bool major = 2; - boolean hasMajor(); - boolean getMajor(); - } - public static final class CompactRegionRequest extends - com.google.protobuf.GeneratedMessage - implements CompactRegionRequestOrBuilder { - // Use CompactRegionRequest.newBuilder() to construct. 
- private CompactRegionRequest(Builder builder) { - super(builder); - } - private CompactRegionRequest(boolean noInit) {} - - private static final CompactRegionRequest defaultInstance; - public static CompactRegionRequest getDefaultInstance() { - return defaultInstance; - } - - public CompactRegionRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // optional bool major = 2; - public static final int MAJOR_FIELD_NUMBER = 2; - private boolean major_; - public boolean hasMajor() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMajor() { - return major_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - major_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized 
= 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, major_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, major_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasMajor() == other.hasMajor()); - if (hasMajor()) { - result = result && (getMajor() - == other.getMajor()); - } - result = result 
&& - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasMajor()) { - hash = (37 * hash) + MAJOR_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMajor()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - major_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.major_ = major_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - 
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasMajor()) { - setMajor(other.getMajor()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - major_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - 
onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // optional bool major = 2; - private boolean major_ ; - public boolean hasMajor() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMajor() { - return major_; - } - public Builder setMajor(boolean value) { - bitField0_ |= 0x00000002; - major_ = value; - onChanged(); - return this; - } - public Builder clearMajor() { - bitField0_ = (bitField0_ & ~0x00000002); - major_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CompactRegionRequest) - } - - static { - defaultInstance = new CompactRegionRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompactRegionRequest) - } - - public 
interface CompactRegionResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class CompactRegionResponse extends - com.google.protobuf.GeneratedMessage - implements CompactRegionResponseOrBuilder { - // Use CompactRegionResponse.newBuilder() to construct. - private CompactRegionResponse(Builder builder) { - super(builder); - } - private CompactRegionResponse(boolean noInit) {} - - private static final CompactRegionResponse defaultInstance; - public static CompactRegionResponse getDefaultInstance() { - return defaultInstance; - } - - public CompactRegionResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream 
input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:CompactRegionResponse) - } - - static { - defaultInstance = new CompactRegionResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompactRegionResponse) - } - - public interface UUIDOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 leastSigBits = 1; - boolean hasLeastSigBits(); - long getLeastSigBits(); - - // required uint64 mostSigBits = 2; - boolean hasMostSigBits(); - long getMostSigBits(); - } - public static final class UUID extends - com.google.protobuf.GeneratedMessage - implements UUIDOrBuilder { - // Use UUID.newBuilder() to construct. 
- private UUID(Builder builder) { - super(builder); - } - private UUID(boolean noInit) {} - - private static final UUID defaultInstance; - public static UUID getDefaultInstance() { - return defaultInstance; - } - - public UUID getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 leastSigBits = 1; - public static final int LEASTSIGBITS_FIELD_NUMBER = 1; - private long leastSigBits_; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - - // required uint64 mostSigBits = 2; - public static final int MOSTSIGBITS_FIELD_NUMBER = 2; - private long mostSigBits_; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - - private void initFields() { - leastSigBits_ = 0L; - mostSigBits_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLeastSigBits()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMostSigBits()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, leastSigBits_); - } - if (((bitField0_ & 
0x00000002) == 0x00000002)) { - output.writeUInt64(2, mostSigBits_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, leastSigBits_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, mostSigBits_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) obj; - - boolean result = true; - result = result && (hasLeastSigBits() == other.hasLeastSigBits()); - if (hasLeastSigBits()) { - result = result && (getLeastSigBits() - == other.getLeastSigBits()); - } - result = result && (hasMostSigBits() == other.hasMostSigBits()); - if (hasMostSigBits()) { - result = result && (getMostSigBits() - == other.getMostSigBits()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLeastSigBits()) { - hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLeastSigBits()); - } - if (hasMostSigBits()) { - 
hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMostSigBits()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - 
if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - leastSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - mostSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; 
- } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.leastSigBits_ = leastSigBits_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.mostSigBits_ = mostSigBits_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) return this; - if (other.hasLeastSigBits()) { - setLeastSigBits(other.getLeastSigBits()); - } - if (other.hasMostSigBits()) { - setMostSigBits(other.getMostSigBits()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLeastSigBits()) { - - return false; - } - if (!hasMostSigBits()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); 
- onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - leastSigBits_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - mostSigBits_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 leastSigBits = 1; - private long leastSigBits_ ; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - public Builder setLeastSigBits(long value) { - bitField0_ |= 0x00000001; - leastSigBits_ = value; - onChanged(); - return this; - } - public Builder clearLeastSigBits() { - bitField0_ = (bitField0_ & ~0x00000001); - leastSigBits_ = 0L; - onChanged(); - return this; - } - - // required uint64 mostSigBits = 2; - private long mostSigBits_ ; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - public Builder setMostSigBits(long value) { - bitField0_ |= 0x00000002; - mostSigBits_ = value; - onChanged(); - return this; - } - public Builder clearMostSigBits() { - bitField0_ = (bitField0_ & ~0x00000002); - mostSigBits_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UUID) - } - - static { - defaultInstance = new UUID(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UUID) - } - - public interface WALEntryOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .WALEntry.WALKey walKey = 1; - boolean hasWalKey(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder(); - - // required 
.WALEntry.WALEdit edit = 2; - boolean hasEdit(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); - } - public static final class WALEntry extends - com.google.protobuf.GeneratedMessage - implements WALEntryOrBuilder { - // Use WALEntry.newBuilder() to construct. - private WALEntry(Builder builder) { - super(builder); - } - private WALEntry(boolean noInit) {} - - private static final WALEntry defaultInstance; - public static WALEntry getDefaultInstance() { - return defaultInstance; - } - - public WALEntry getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; - } - - public interface WALKeyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes encodedRegionName = 1; - boolean hasEncodedRegionName(); - com.google.protobuf.ByteString getEncodedRegionName(); - - // required bytes tableName = 2; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required uint64 logSequenceNumber = 3; - boolean hasLogSequenceNumber(); - long getLogSequenceNumber(); - - // required uint64 writeTime = 4; - boolean hasWriteTime(); - long getWriteTime(); - - // optional .UUID clusterId = 5; - boolean hasClusterId(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); - } - public static final class WALKey extends - 
com.google.protobuf.GeneratedMessage - implements WALKeyOrBuilder { - // Use WALKey.newBuilder() to construct. - private WALKey(Builder builder) { - super(builder); - } - private WALKey(boolean noInit) {} - - private static final WALKey defaultInstance; - public static WALKey getDefaultInstance() { - return defaultInstance; - } - - public WALKey getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - private int bitField0_; - // required bytes encodedRegionName = 1; - public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString encodedRegionName_; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - - // required bytes tableName = 2; - public static final int TABLENAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required uint64 logSequenceNumber = 3; - public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; - private long logSequenceNumber_; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - - // required uint64 writeTime = 4; - public static final int WRITETIME_FIELD_NUMBER = 4; - private long writeTime_; - public boolean 
hasWriteTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - - // optional .UUID clusterId = 5; - public static final int CLUSTERID_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { - return clusterId_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - return clusterId_; - } - - private void initFields() { - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - tableName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; - writeTime_ = 0L; - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEncodedRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasLogSequenceNumber()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasWriteTime()) { - memoizedIsInitialized = 0; - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - 
output.writeUInt64(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, clusterId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) obj; - - boolean result = true; - result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); - if 
(hasEncodedRegionName()) { - result = result && getEncodedRegionName() - .equals(other.getEncodedRegionName()); - } - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); - if (hasLogSequenceNumber()) { - result = result && (getLogSequenceNumber() - == other.getLogSequenceNumber()); - } - result = result && (hasWriteTime() == other.hasWriteTime()); - if (hasWriteTime()) { - result = result && (getWriteTime() - == other.getWriteTime()); - } - result = result && (hasClusterId() == other.hasClusterId()); - if (hasClusterId()) { - result = result && getClusterId() - .equals(other.getClusterId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEncodedRegionName()) { - hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getEncodedRegionName().hashCode(); - } - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasLogSequenceNumber()) { - hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); - } - if (hasWriteTime()) { - hash = (37 * hash) + WRITETIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteTime()); - } - if (hasClusterId()) { - hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; - hash = (53 * hash) + getClusterId().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - // Construct using 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getClusterIdFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - logSequenceNumber_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - writeTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey 
buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.encodedRegionName_ = encodedRegionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.logSequenceNumber_ = logSequenceNumber_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.writeTime_ = writeTime_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - if (clusterIdBuilder_ == null) { - result.clusterId_ = clusterId_; - } else { - result.clusterId_ = clusterIdBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; - if (other.hasEncodedRegionName()) { - setEncodedRegionName(other.getEncodedRegionName()); - } - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasLogSequenceNumber()) { - setLogSequenceNumber(other.getLogSequenceNumber()); - } - if (other.hasWriteTime()) { - setWriteTime(other.getWriteTime()); - } - if (other.hasClusterId()) { - mergeClusterId(other.getClusterId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEncodedRegionName()) { - - return false; - } - if (!hasTableName()) { - - return false; - } - if (!hasLogSequenceNumber()) { - - return false; - } - if (!hasWriteTime()) { - - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - encodedRegionName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - logSequenceNumber_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - writeTime_ = input.readUInt64(); - break; - } - case 42: { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - input.readMessage(subBuilder, extensionRegistry); - setClusterId(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes encodedRegionName = 1; - private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - encodedRegionName_ = value; - onChanged(); - return this; - } - public Builder clearEncodedRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); - onChanged(); - return this; - } - - // required bytes tableName = 2; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000002); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required uint64 logSequenceNumber = 3; - private long logSequenceNumber_ ; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 
0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - public Builder setLogSequenceNumber(long value) { - bitField0_ |= 0x00000004; - logSequenceNumber_ = value; - onChanged(); - return this; - } - public Builder clearLogSequenceNumber() { - bitField0_ = (bitField0_ & ~0x00000004); - logSequenceNumber_ = 0L; - onChanged(); - return this; - } - - // required uint64 writeTime = 4; - private long writeTime_ ; - public boolean hasWriteTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - public Builder setWriteTime(long value) { - bitField0_ |= 0x00000008; - writeTime_ = value; - onChanged(); - return this; - } - public Builder clearWriteTime() { - bitField0_ = (bitField0_ & ~0x00000008); - writeTime_ = 0L; - onChanged(); - return this; - } - - // optional .UUID clusterId = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> clusterIdBuilder_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { - if (clusterIdBuilder_ == null) { - return clusterId_; - } else { - return clusterIdBuilder_.getMessage(); - } - } - public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - clusterId_ = value; - onChanged(); - } else { - clusterIdBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - 
public Builder setClusterId( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder builderForValue) { - if (clusterIdBuilder_ == null) { - clusterId_ = builderForValue.build(); - onChanged(); - } else { - clusterIdBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - clusterId_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) { - clusterId_ = - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); - } else { - clusterId_ = value; - } - onChanged(); - } else { - clusterIdBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearClusterId() { - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); - onChanged(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder getClusterIdBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getClusterIdFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - if (clusterIdBuilder_ != null) { - return clusterIdBuilder_.getMessageOrBuilder(); - } else { - return clusterId_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> - getClusterIdFieldBuilder() { - if (clusterIdBuilder_ == null) { - 
clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder>( - clusterId_, - getParentForChildren(), - isClean()); - clusterId_ = null; - } - return clusterIdBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) - } - - static { - defaultInstance = new WALKey(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALKey) - } - - public interface WALEditOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .KeyValue keyValue = 1; - java.util.List - getKeyValueList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index); - int getKeyValueCount(); - java.util.List - getKeyValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index); - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - java.util.List - getFamilyScopeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); - int getFamilyScopeCount(); - java.util.List - getFamilyScopeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index); - } - public static final class WALEdit extends - com.google.protobuf.GeneratedMessage - implements WALEditOrBuilder { - // Use WALEdit.newBuilder() to construct. 
- private WALEdit(Builder builder) { - super(builder); - } - private WALEdit(boolean noInit) {} - - private static final WALEdit defaultInstance; - public static WALEdit getDefaultInstance() { - return defaultInstance; - } - - public WALEdit getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - public enum ScopeType - implements com.google.protobuf.ProtocolMessageEnum { - REPLICATION_SCOPE_LOCAL(0, 0), - REPLICATION_SCOPE_GLOBAL(1, 1), - ; - - public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; - public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; - - - public final int getNumber() { return value; } - - public static ScopeType valueOf(int value) { - switch (value) { - case 0: return REPLICATION_SCOPE_LOCAL; - case 1: return REPLICATION_SCOPE_GLOBAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public ScopeType findValueByNumber(int number) { - return ScopeType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); - } - - private static final ScopeType[] VALUES = { - REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, - }; - - public static ScopeType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private ScopeType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) - } - - public interface FamilyScopeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - boolean hasScopeType(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); - } - public static final class FamilyScope extends - com.google.protobuf.GeneratedMessage - implements FamilyScopeOrBuilder { - // Use FamilyScope.newBuilder() to construct. 
- private FamilyScope(Builder builder) { - super(builder); - } - private FamilyScope(boolean noInit) {} - - private static final FamilyScope defaultInstance; - public static FamilyScope getDefaultInstance() { - return defaultInstance; - } - - public FamilyScope getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - public static final int SCOPETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_; - public boolean hasScopeType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { - return scopeType_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - 
memoizedIsInitialized = 0; - return false; - } - if (!hasScopeType()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, scopeType_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, scopeType_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasScopeType() == other.hasScopeType()); - if (hasScopeType()) { - 
result = result && - (getScopeType() == other.getScopeType()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasScopeType()) { - hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getScopeType()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) - throws java.io.IOException { - 
return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope prototype) { - return 
newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.scopeType_ = scopeType_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasScopeType()) { - setScopeType(other.getScopeType()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - if (!hasScopeType()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - 
scopeType_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - public boolean hasScopeType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { - return scopeType_; - } - public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - scopeType_ = value; - onChanged(); - return this; - } - public Builder clearScopeType() { - bitField0_ = (bitField0_ & ~0x00000002); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) - } - - static { - defaultInstance = new FamilyScope(true); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) - } - - // repeated .KeyValue keyValue = 1; - public static final int KEYVALUE_FIELD_NUMBER = 1; - private java.util.List keyValue_; - public java.util.List getKeyValueList() { - return keyValue_; - } - public java.util.List - getKeyValueOrBuilderList() { - return keyValue_; - } - public int getKeyValueCount() { - return keyValue_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { - return keyValue_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index) { - return keyValue_.get(index); - } - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - public static final int FAMILYSCOPE_FIELD_NUMBER = 2; - private java.util.List familyScope_; - public java.util.List getFamilyScopeList() { - return familyScope_; - } - public java.util.List - getFamilyScopeOrBuilderList() { - return familyScope_; - } - public int getFamilyScopeCount() { - return familyScope_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - return familyScope_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - return familyScope_.get(index); - } - - private void initFields() { - keyValue_ = java.util.Collections.emptyList(); - familyScope_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getKeyValueCount(); i++) { - if (!getKeyValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { 
- memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < keyValue_.size(); i++) { - output.writeMessage(1, keyValue_.get(i)); - } - for (int i = 0; i < familyScope_.size(); i++) { - output.writeMessage(2, familyScope_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < keyValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, keyValue_.get(i)); - } - for (int i = 0; i < familyScope_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, familyScope_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) obj; - - boolean result = true; - result = result && getKeyValueList() - .equals(other.getKeyValueList()); - result = result && getFamilyScopeList() - .equals(other.getFamilyScopeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = 
(19 * hash) + getDescriptorForType().hashCode(); - if (getKeyValueCount() > 0) { - hash = (37 * hash) + KEYVALUE_FIELD_NUMBER; - hash = (53 * hash) + getKeyValueList().hashCode(); - } - if (getFamilyScopeCount() > 0) { - hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; - hash = (53 * hash) + getFamilyScopeList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getKeyValueFieldBuilder(); - getFamilyScopeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (keyValueBuilder_ == null) { - keyValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - keyValueBuilder_.clear(); - } - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - familyScopeBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit(this); - int from_bitField0_ = bitField0_; - if (keyValueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - keyValue_ = java.util.Collections.unmodifiableList(keyValue_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.keyValue_ = keyValue_; - } else { - result.keyValue_ = keyValueBuilder_.build(); - } - if (familyScopeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = java.util.Collections.unmodifiableList(familyScope_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.familyScope_ = familyScope_; - } else { - result.familyScope_ = familyScopeBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; - if (keyValueBuilder_ == null) { - if (!other.keyValue_.isEmpty()) { - if (keyValue_.isEmpty()) { - keyValue_ = other.keyValue_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureKeyValueIsMutable(); - keyValue_.addAll(other.keyValue_); - } - onChanged(); - } - } else { - if (!other.keyValue_.isEmpty()) { - if (keyValueBuilder_.isEmpty()) { - keyValueBuilder_.dispose(); - keyValueBuilder_ = null; - keyValue_ = other.keyValue_; - bitField0_ = (bitField0_ & ~0x00000001); - keyValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getKeyValueFieldBuilder() : null; - } else { - keyValueBuilder_.addAllMessages(other.keyValue_); - } - } - } - if (familyScopeBuilder_ == null) { - if (!other.familyScope_.isEmpty()) { - if (familyScope_.isEmpty()) { - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyScopeIsMutable(); - familyScope_.addAll(other.familyScope_); - } - onChanged(); - } - } else { - if (!other.familyScope_.isEmpty()) { - if (familyScopeBuilder_.isEmpty()) { - familyScopeBuilder_.dispose(); - familyScopeBuilder_ = null; - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - familyScopeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getFamilyScopeFieldBuilder() : null; - } else { - familyScopeBuilder_.addAllMessages(other.familyScope_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getKeyValueCount(); i++) { - if (!getKeyValue(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addKeyValue(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyScope(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .KeyValue keyValue = 1; - private java.util.List keyValue_ = - java.util.Collections.emptyList(); - private void ensureKeyValueIsMutable() { - if (!((bitField0_ & 0x00000001) == 
0x00000001)) { - keyValue_ = new java.util.ArrayList(keyValue_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> keyValueBuilder_; - - public java.util.List getKeyValueList() { - if (keyValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(keyValue_); - } else { - return keyValueBuilder_.getMessageList(); - } - } - public int getKeyValueCount() { - if (keyValueBuilder_ == null) { - return keyValue_.size(); - } else { - return keyValueBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { - if (keyValueBuilder_ == null) { - return keyValue_.get(index); - } else { - return keyValueBuilder_.getMessage(index); - } - } - public Builder setKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.set(index, value); - onChanged(); - } else { - keyValueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.set(index, builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addKeyValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.add(value); - onChanged(); - } else { - 
keyValueBuilder_.addMessage(value); - } - return this; - } - public Builder addKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (keyValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueIsMutable(); - keyValue_.add(index, value); - onChanged(); - } else { - keyValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addKeyValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.add(builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addKeyValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.add(index, builderForValue.build()); - onChanged(); - } else { - keyValueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllKeyValue( - java.lang.Iterable values) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - super.addAll(values, keyValue_); - onChanged(); - } else { - keyValueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearKeyValue() { - if (keyValueBuilder_ == null) { - keyValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - keyValueBuilder_.clear(); - } - return this; - } - public Builder removeKeyValue(int index) { - if (keyValueBuilder_ == null) { - ensureKeyValueIsMutable(); - keyValue_.remove(index); - onChanged(); - } else { - keyValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getKeyValueBuilder( - int index) { - return getKeyValueFieldBuilder().getBuilder(index); - 
} - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( - int index) { - if (keyValueBuilder_ == null) { - return keyValue_.get(index); } else { - return keyValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getKeyValueOrBuilderList() { - if (keyValueBuilder_ != null) { - return keyValueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(keyValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder() { - return getKeyValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder( - int index) { - return getKeyValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public java.util.List - getKeyValueBuilderList() { - return getKeyValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> - getKeyValueFieldBuilder() { - if (keyValueBuilder_ == null) { - keyValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( - keyValue_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - keyValue_ = null; - } - return keyValueBuilder_; - } - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - private java.util.List familyScope_ = - java.util.Collections.emptyList(); 
- private void ensureFamilyScopeIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = new java.util.ArrayList(familyScope_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; - - public java.util.List getFamilyScopeList() { - if (familyScopeBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyScope_); - } else { - return familyScopeBuilder_.getMessageList(); - } - } - public int getFamilyScopeCount() { - if (familyScopeBuilder_ == null) { - return familyScope_.size(); - } else { - return familyScopeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); - } else { - return familyScopeBuilder_.getMessage(index); - } - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.set(index, value); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.set(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, builderForValue.build()); - } - return 
this; - } - public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(value); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(index, value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFamilyScope( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFamilyScope( - java.lang.Iterable values) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - super.addAll(values, familyScope_); - onChanged(); - } else { - familyScopeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFamilyScope() { - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & 
~0x00000002); - onChanged(); - } else { - familyScopeBuilder_.clear(); - } - return this; - } - public Builder removeFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.remove(index); - onChanged(); - } else { - familyScopeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); } else { - return familyScopeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFamilyScopeOrBuilderList() { - if (familyScopeBuilder_ != null) { - return familyScopeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyScope_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { - return getFamilyScopeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public java.util.List - getFamilyScopeBuilderList() { - return getFamilyScopeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> - getFamilyScopeFieldBuilder() { - if (familyScopeBuilder_ == null) { - familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( - familyScope_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - familyScope_ = null; - } - return familyScopeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) - } - - static { - defaultInstance = new WALEdit(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) - } - - private int bitField0_; - // required .WALEntry.WALKey walKey = 1; - public static final int WALKEY_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_; - public boolean hasWalKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { - return walKey_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { - return walKey_; - } - - // required .WALEntry.WALEdit edit = 2; - public static final int EDIT_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_; - public boolean hasEdit() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { - return edit_; - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { - return edit_; - } - - private void initFields() { - walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasWalKey()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasEdit()) { - memoizedIsInitialized = 0; - return false; - } - if (!getWalKey().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getEdit().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, walKey_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, edit_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, walKey_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, edit_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) obj; - - boolean result = true; - result = result && (hasWalKey() == other.hasWalKey()); - if (hasWalKey()) { - result = result && getWalKey() - .equals(other.getWalKey()); - } - result = result && (hasEdit() == other.hasEdit()); - if (hasEdit()) { - result = result && getEdit() - .equals(other.getEdit()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasWalKey()) { - hash = (37 * hash) + WALKEY_FIELD_NUMBER; - hash = (53 * hash) + getWalKey().hashCode(); - } - if (hasEdit()) { - hash = (37 * hash) + EDIT_FIELD_NUMBER; - hash = (53 * hash) + getEdit().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(byte[] data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getWalKeyFieldBuilder(); - getEditFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (walKeyBuilder_ == null) { - walKey_ = 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - } else { - walKeyBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } else { - editBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 
0x00000001; - } - if (walKeyBuilder_ == null) { - result.walKey_ = walKey_; - } else { - result.walKey_ = walKeyBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (editBuilder_ == null) { - result.edit_ = edit_; - } else { - result.edit_ = editBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()) return this; - if (other.hasWalKey()) { - mergeWalKey(other.getWalKey()); - } - if (other.hasEdit()) { - mergeEdit(other.getEdit()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasWalKey()) { - - return false; - } - if (!hasEdit()) { - - return false; - } - if (!getWalKey().isInitialized()) { - - return false; - } - if (!getEdit().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(); - if (hasWalKey()) { - subBuilder.mergeFrom(getWalKey()); - } - input.readMessage(subBuilder, extensionRegistry); - setWalKey(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(); - if (hasEdit()) { - subBuilder.mergeFrom(getEdit()); - } - input.readMessage(subBuilder, extensionRegistry); - setEdit(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .WALEntry.WALKey walKey = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> walKeyBuilder_; - public boolean hasWalKey() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { - if (walKeyBuilder_ == null) { - return walKey_; - } else { - return walKeyBuilder_.getMessage(); - } - } - public Builder setWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { - if (walKeyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - walKey_ = value; - onChanged(); - } else { - 
walKeyBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setWalKey( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder builderForValue) { - if (walKeyBuilder_ == null) { - walKey_ = builderForValue.build(); - onChanged(); - } else { - walKeyBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { - if (walKeyBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - walKey_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) { - walKey_ = - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(walKey_).mergeFrom(value).buildPartial(); - } else { - walKey_ = value; - } - onChanged(); - } else { - walKeyBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearWalKey() { - if (walKeyBuilder_ == null) { - walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); - onChanged(); - } else { - walKeyBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder getWalKeyBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getWalKeyFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { - if (walKeyBuilder_ != null) { - return walKeyBuilder_.getMessageOrBuilder(); - } else { - return walKey_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> - getWalKeyFieldBuilder() { - if (walKeyBuilder_ == null) { - walKeyBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder>( - walKey_, - getParentForChildren(), - isClean()); - walKey_ = null; - } - return walKeyBuilder_; - } - - // required .WALEntry.WALEdit edit = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; - public boolean hasEdit() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { - if (editBuilder_ == null) { - return edit_; - } else { - return editBuilder_.getMessage(); - } - } - public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { - if (editBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - edit_ = value; - onChanged(); - } else { - editBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setEdit( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder builderForValue) { - if (editBuilder_ == null) { - edit_ = builderForValue.build(); - onChanged(); - } else { - 
editBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { - if (editBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - edit_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) { - edit_ = - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); - } else { - edit_ = value; - } - onChanged(); - } else { - editBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearEdit() { - if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); - onChanged(); - } else { - editBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getEditFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { - if (editBuilder_ != null) { - return editBuilder_.getMessageOrBuilder(); - } else { - return edit_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> - getEditFieldBuilder() { - if (editBuilder_ == null) { - editBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder>( - edit_, - getParentForChildren(), - isClean()); - edit_ = null; - } - return editBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry) - } - - static { - defaultInstance = new WALEntry(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry) - } - - public interface ReplicateWALEntryRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .WALEntry walEntry = 1; - java.util.List - getWalEntryList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index); - int getWalEntryCount(); - java.util.List - getWalEntryOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( - int index); - } - public static final class ReplicateWALEntryRequest extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryRequestOrBuilder { - // Use ReplicateWALEntryRequest.newBuilder() to construct. 
- private ReplicateWALEntryRequest(Builder builder) { - super(builder); - } - private ReplicateWALEntryRequest(boolean noInit) {} - - private static final ReplicateWALEntryRequest defaultInstance; - public static ReplicateWALEntryRequest getDefaultInstance() { - return defaultInstance; - } - - public ReplicateWALEntryRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - } - - // repeated .WALEntry walEntry = 1; - public static final int WALENTRY_FIELD_NUMBER = 1; - private java.util.List walEntry_; - public java.util.List getWalEntryList() { - return walEntry_; - } - public java.util.List - getWalEntryOrBuilderList() { - return walEntry_; - } - public int getWalEntryCount() { - return walEntry_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { - return walEntry_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( - int index) { - return walEntry_.get(index); - } - - private void initFields() { - walEntry_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getWalEntryCount(); i++) { - if (!getWalEntry(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void 
writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < walEntry_.size(); i++) { - output.writeMessage(1, walEntry_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < walEntry_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, walEntry_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) obj; - - boolean result = true; - result = result && getWalEntryList() - .equals(other.getWalEntryList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getWalEntryCount() > 0) { - hash = (37 * hash) + WALENTRY_FIELD_NUMBER; - hash = (53 * hash) + getWalEntryList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getWalEntryFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (walEntryBuilder_ == null) { - walEntry_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - walEntryBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest(this); - int from_bitField0_ = bitField0_; - if (walEntryBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - walEntry_ = java.util.Collections.unmodifiableList(walEntry_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.walEntry_ = walEntry_; - } else { - result.walEntry_ = walEntryBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; - if (walEntryBuilder_ == null) { - if (!other.walEntry_.isEmpty()) { - if (walEntry_.isEmpty()) { - walEntry_ = other.walEntry_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureWalEntryIsMutable(); - walEntry_.addAll(other.walEntry_); - } - onChanged(); - } - } else { - if (!other.walEntry_.isEmpty()) { - if (walEntryBuilder_.isEmpty()) { - walEntryBuilder_.dispose(); - walEntryBuilder_ = null; - walEntry_ = other.walEntry_; - bitField0_ = 
(bitField0_ & ~0x00000001); - walEntryBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getWalEntryFieldBuilder() : null; - } else { - walEntryBuilder_.addAllMessages(other.walEntry_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getWalEntryCount(); i++) { - if (!getWalEntry(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addWalEntry(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .WALEntry walEntry = 1; - private java.util.List walEntry_ = - java.util.Collections.emptyList(); - private void ensureWalEntryIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - walEntry_ = new java.util.ArrayList(walEntry_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> walEntryBuilder_; - - public java.util.List getWalEntryList() { - if (walEntryBuilder_ == null) { - return java.util.Collections.unmodifiableList(walEntry_); - } else { - return walEntryBuilder_.getMessageList(); - } - } - public int getWalEntryCount() { - if (walEntryBuilder_ == null) { - return walEntry_.size(); - } else { - return walEntryBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { - if (walEntryBuilder_ == null) { - return walEntry_.get(index); - } else { - return walEntryBuilder_.getMessage(index); - } - } - public Builder setWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { - if (walEntryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureWalEntryIsMutable(); - walEntry_.set(index, value); - onChanged(); - } else { - walEntryBuilder_.setMessage(index, value); - } - return this; - } - public Builder setWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { - if (walEntryBuilder_ == null) { - ensureWalEntryIsMutable(); - walEntry_.set(index, builderForValue.build()); - onChanged(); - } else { - walEntryBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addWalEntry(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { - if (walEntryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureWalEntryIsMutable(); - walEntry_.add(value); - onChanged(); - } else { - walEntryBuilder_.addMessage(value); - } - return this; - } - public Builder addWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { - if (walEntryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - 
ensureWalEntryIsMutable(); - walEntry_.add(index, value); - onChanged(); - } else { - walEntryBuilder_.addMessage(index, value); - } - return this; - } - public Builder addWalEntry( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { - if (walEntryBuilder_ == null) { - ensureWalEntryIsMutable(); - walEntry_.add(builderForValue.build()); - onChanged(); - } else { - walEntryBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addWalEntry( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { - if (walEntryBuilder_ == null) { - ensureWalEntryIsMutable(); - walEntry_.add(index, builderForValue.build()); - onChanged(); - } else { - walEntryBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllWalEntry( - java.lang.Iterable values) { - if (walEntryBuilder_ == null) { - ensureWalEntryIsMutable(); - super.addAll(values, walEntry_); - onChanged(); - } else { - walEntryBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearWalEntry() { - if (walEntryBuilder_ == null) { - walEntry_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - walEntryBuilder_.clear(); - } - return this; - } - public Builder removeWalEntry(int index) { - if (walEntryBuilder_ == null) { - ensureWalEntryIsMutable(); - walEntry_.remove(index); - onChanged(); - } else { - walEntryBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder getWalEntryBuilder( - int index) { - return getWalEntryFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( - int index) { - if (walEntryBuilder_ == null) { - return walEntry_.get(index); } else { - return walEntryBuilder_.getMessageOrBuilder(index); - 
} - } - public java.util.List - getWalEntryOrBuilderList() { - if (walEntryBuilder_ != null) { - return walEntryBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(walEntry_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder() { - return getWalEntryFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder( - int index) { - return getWalEntryFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); - } - public java.util.List - getWalEntryBuilderList() { - return getWalEntryFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> - getWalEntryFieldBuilder() { - if (walEntryBuilder_ == null) { - walEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder>( - walEntry_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - walEntry_ = null; - } - return walEntryBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) - } - - static { - defaultInstance = new ReplicateWALEntryRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) - } - - public interface ReplicateWALEntryResponseOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { - } - public static final class ReplicateWALEntryResponse extends - com.google.protobuf.GeneratedMessage - implements ReplicateWALEntryResponseOrBuilder { - // Use ReplicateWALEntryResponse.newBuilder() to construct. - private ReplicateWALEntryResponse(Builder builder) { - super(builder); - } - private ReplicateWALEntryResponse(boolean noInit) {} - - private static final ReplicateWALEntryResponse defaultInstance; - public static ReplicateWALEntryResponse getDefaultInstance() { - return defaultInstance; - } - - public ReplicateWALEntryResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } 
- - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public 
static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - 
return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) - } - - static { - defaultInstance = new ReplicateWALEntryResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) - } - - public interface RollWALWriterRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class RollWALWriterRequest extends - com.google.protobuf.GeneratedMessage - implements RollWALWriterRequestOrBuilder { - // Use RollWALWriterRequest.newBuilder() to construct. 
- private RollWALWriterRequest(Builder builder) { - super(builder); - } - private RollWALWriterRequest(boolean noInit) {} - - private static final RollWALWriterRequest defaultInstance; - public static RollWALWriterRequest getDefaultInstance() { - return defaultInstance; - } - - public RollWALWriterRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest build() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - 
this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) - } - - static { - defaultInstance = new RollWALWriterRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RollWALWriterRequest) - } - - public interface RollWALWriterResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes regionToFlush = 1; - java.util.List getRegionToFlushList(); - int getRegionToFlushCount(); - com.google.protobuf.ByteString getRegionToFlush(int index); - } - public static final class RollWALWriterResponse extends - com.google.protobuf.GeneratedMessage - implements RollWALWriterResponseOrBuilder { - // Use RollWALWriterResponse.newBuilder() to construct. 
- private RollWALWriterResponse(Builder builder) { - super(builder); - } - private RollWALWriterResponse(boolean noInit) {} - - private static final RollWALWriterResponse defaultInstance; - public static RollWALWriterResponse getDefaultInstance() { - return defaultInstance; - } - - public RollWALWriterResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; - } - - // repeated bytes regionToFlush = 1; - public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; - private java.util.List regionToFlush_; - public java.util.List - getRegionToFlushList() { - return regionToFlush_; - } - public int getRegionToFlushCount() { - return regionToFlush_.size(); - } - public com.google.protobuf.ByteString getRegionToFlush(int index) { - return regionToFlush_.get(index); - } - - private void initFields() { - regionToFlush_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < regionToFlush_.size(); i++) { - output.writeBytes(1, regionToFlush_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int 
dataSize = 0; - for (int i = 0; i < regionToFlush_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(regionToFlush_.get(i)); - } - size += dataSize; - size += 1 * getRegionToFlushList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) obj; - - boolean result = true; - result = result && getRegionToFlushList() - .equals(other.getRegionToFlushList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRegionToFlushCount() > 0) { - hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER; - hash = (53 * hash) + getRegionToFlushList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public 
static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - 
super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - regionToFlush_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse(this); - int from_bitField0_ = 
bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.regionToFlush_ = regionToFlush_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; - if (!other.regionToFlush_.isEmpty()) { - if (regionToFlush_.isEmpty()) { - regionToFlush_ = other.regionToFlush_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRegionToFlushIsMutable(); - regionToFlush_.addAll(other.regionToFlush_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureRegionToFlushIsMutable(); - 
regionToFlush_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes regionToFlush = 1; - private java.util.List regionToFlush_ = java.util.Collections.emptyList();; - private void ensureRegionToFlushIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - regionToFlush_ = new java.util.ArrayList(regionToFlush_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getRegionToFlushList() { - return java.util.Collections.unmodifiableList(regionToFlush_); - } - public int getRegionToFlushCount() { - return regionToFlush_.size(); - } - public com.google.protobuf.ByteString getRegionToFlush(int index) { - return regionToFlush_.get(index); - } - public Builder setRegionToFlush( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionToFlushIsMutable(); - regionToFlush_.set(index, value); - onChanged(); - return this; - } - public Builder addRegionToFlush(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRegionToFlushIsMutable(); - regionToFlush_.add(value); - onChanged(); - return this; - } - public Builder addAllRegionToFlush( - java.lang.Iterable values) { - ensureRegionToFlushIsMutable(); - super.addAll(values, regionToFlush_); - onChanged(); - return this; - } - public Builder clearRegionToFlush() { - regionToFlush_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) - } - - static { - defaultInstance = new RollWALWriterResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RollWALWriterResponse) - } - - public interface StopServerRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string reason = 1; - boolean hasReason(); - String getReason(); - } - public static final 
class StopServerRequest extends - com.google.protobuf.GeneratedMessage - implements StopServerRequestOrBuilder { - // Use StopServerRequest.newBuilder() to construct. - private StopServerRequest(Builder builder) { - super(builder); - } - private StopServerRequest(boolean noInit) {} - - private static final StopServerRequest defaultInstance; - public static StopServerRequest getDefaultInstance() { - return defaultInstance; - } - - public StopServerRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; - } - - private int bitField0_; - // required string reason = 1; - public static final int REASON_FIELD_NUMBER = 1; - private java.lang.Object reason_; - public boolean hasReason() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getReason() { - java.lang.Object ref = reason_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - reason_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getReasonBytes() { - java.lang.Object ref = reason_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - reason_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - reason_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized 
= memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasReason()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getReasonBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getReasonBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) obj; - - boolean result = true; - result = result && (hasReason() == other.hasReason()); - if (hasReason()) { - result = result && getReason() - .equals(other.getReason()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasReason()) { - hash = (37 * hash) + REASON_FIELD_NUMBER; - hash = (53 * hash) + 
getReason().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = 
newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - reason_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.reason_ = reason_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance()) return this; - if (other.hasReason()) { - setReason(other.getReason()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasReason()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - reason_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string reason = 1; - private java.lang.Object reason_ = ""; - public boolean hasReason() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getReason() { - java.lang.Object ref = reason_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - reason_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setReason(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - reason_ = value; - onChanged(); - return this; - } - public Builder clearReason() { - bitField0_ = (bitField0_ & ~0x00000001); - reason_ = getDefaultInstance().getReason(); - onChanged(); - return this; - } - void setReason(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - reason_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:StopServerRequest) - } - - static { - defaultInstance = new StopServerRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopServerRequest) - } - - public interface StopServerResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class StopServerResponse extends - com.google.protobuf.GeneratedMessage - implements StopServerResponseOrBuilder { - // Use StopServerResponse.newBuilder() to construct. 
- private StopServerResponse(Builder builder) { - super(builder); - } - private StopServerResponse(boolean noInit) {} - - private static final StopServerResponse defaultInstance; - public static StopServerResponse getDefaultInstance() { - return defaultInstance; - } - - public StopServerResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse 
parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder 
builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - 
default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:StopServerResponse) - } - - static { - defaultInstance = new StopServerResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:StopServerResponse) - } - - public interface GetServerInfoRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class GetServerInfoRequest extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoRequestOrBuilder { - // Use GetServerInfoRequest.newBuilder() to construct. - private GetServerInfoRequest(Builder builder) { - super(builder); - } - private GetServerInfoRequest(boolean noInit) {} - - private static final GetServerInfoRequest defaultInstance; - public static GetServerInfoRequest getDefaultInstance() { - return defaultInstance; - } - - public GetServerInfoRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private 
int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(byte[] data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public 
Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public 
Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) - } - - static { - defaultInstance = new GetServerInfoRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetServerInfoRequest) - } - - public interface GetServerInfoResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .ServerName serverName = 1; - boolean hasServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); - } - public static final class GetServerInfoResponse extends - com.google.protobuf.GeneratedMessage - implements GetServerInfoResponseOrBuilder { - // Use GetServerInfoResponse.newBuilder() to construct. 
- private GetServerInfoResponse(Builder builder) { - super(builder); - } - private GetServerInfoResponse(boolean noInit) {} - - private static final GetServerInfoResponse defaultInstance; - public static GetServerInfoResponse getDefaultInstance() { - return defaultInstance; - } - - public GetServerInfoResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; - } - - private int bitField0_; - // required .ServerName serverName = 1; - public static final int SERVERNAME_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - return serverName_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - return serverName_; - } - - private void initFields() { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasServerName()) { - memoizedIsInitialized = 0; - return false; - } - if (!getServerName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - 
throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, serverName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, serverName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) obj; - - boolean result = true; - result = result && (hasServerName() == other.hasServerName()); - if (hasServerName()) { - result = result && getServerName() - .equals(other.getServerName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasServerName()) { - hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; - hash = (53 * hash) + getServerName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.ByteString data) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse 
parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getServerNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (serverNameBuilder_ == null) { - result.serverName_ = serverName_; - } else { - result.serverName_ = serverNameBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; - if (other.hasServerName()) { - mergeServerName(other.getServerName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasServerName()) { - - return false; - } - if (!getServerName().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); - if (hasServerName()) { - subBuilder.mergeFrom(getServerName()); - } - input.readMessage(subBuilder, extensionRegistry); - setServerName(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .ServerName serverName = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; - public boolean hasServerName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { - if (serverNameBuilder_ == null) { - return serverName_; - } else { - return serverNameBuilder_.getMessage(); - } - } - public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - serverName_ = value; - onChanged(); - } else { - 
serverNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setServerName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { - if (serverNameBuilder_ == null) { - serverName_ = builderForValue.build(); - onChanged(); - } else { - serverNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { - if (serverNameBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { - serverName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); - } else { - serverName_ = value; - } - onChanged(); - } else { - serverNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearServerName() { - if (serverNameBuilder_ == null) { - serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); - onChanged(); - } else { - serverNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getServerNameFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { - if (serverNameBuilder_ != null) { - return serverNameBuilder_.getMessageOrBuilder(); - } else { - return serverName_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> - getServerNameFieldBuilder() { - if (serverNameBuilder_ == null) { - serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( - serverName_, - getParentForChildren(), - isClean()); - serverName_ = null; - } - return serverNameBuilder_; - } - - // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) - } - - static { - defaultInstance = new GetServerInfoResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetServerInfoResponse) - } - - public static abstract class RegionAdminService - implements com.google.protobuf.Service { - protected RegionAdminService() {} - - public interface Interface { - public abstract void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void closeRegion( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new RegionAdminService() { - @java.lang.Override - public void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest 
request, - com.google.protobuf.RpcCallback done) { - impl.getRegionInfo(controller, request, done); - } - - @java.lang.Override - public void getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done) { - impl.getStoreFileList(controller, request, done); - } - - @java.lang.Override - public void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.getOnlineRegion(controller, request, done); - } - - @java.lang.Override - public void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.openRegion(controller, request, done); - } - - @java.lang.Override - public void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.closeRegion(controller, request, done); - } - - @java.lang.Override - public void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.flushRegion(controller, request, done); - } - - @java.lang.Override - public void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.splitRegion(controller, request, done); - } - - @java.lang.Override - public void compactRegion( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { - impl.compactRegion(controller, request, done); - } - - @java.lang.Override - public void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { - impl.replicateWALEntry(controller, request, done); - } - - @java.lang.Override - public void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { - impl.rollWALWriter(controller, request, done); - } - - @java.lang.Override - public void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { - impl.getServerInfo(controller, request, done); - } - - @java.lang.Override - public void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { - impl.stopServer(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new 
java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request); - case 1: - return impl.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request); - case 2: - return impl.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request); - case 3: - return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request); - case 4: - return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request); - case 5: - return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request); - case 6: - return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request); - case 7: - return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request); - case 8: - return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request); - case 9: - return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request); - case 10: - return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request); - case 11: - return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - 
getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if 
(method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - 
com.google.protobuf.RpcCallback done); - - public abstract void getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - 
com.google.protobuf.RpcCallback done); - - public abstract void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.openRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 7: - this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 8: - this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 9: - this.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 10: - this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 11: - this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - 
if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - 
"Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); - case 8: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); - case 9: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); - case 10: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); - case 11: - return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RegionAdminService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - 
this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance())); - } - - public void getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance())); - } - - public void getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance())); - } - - public void openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance())); - } - - public void closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance())); - } - - public void flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance())); - } - - public void splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance())); - } - - public void compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance())); - } - - public void replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - 
done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); - } - - public void rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance())); - } - - public void getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(10), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance())); - } - - public void stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(), - 
com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) - throws com.google.protobuf.ServiceException; - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) 
channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()); - } - - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(8), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(9), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(10), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(11), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRegionInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetRegionInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRegionInfoResponse_descriptor; - private 
static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetRegionInfoResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetStoreFileListRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetStoreFileListRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetStoreFileListResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetStoreFileListResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetOnlineRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetOnlineRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetOnlineRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetOnlineRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OpenRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OpenRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_OpenRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_OpenRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CloseRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CloseRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CloseRegionResponse_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CloseRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FlushRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FlushRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FlushRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FlushRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SplitRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SplitRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SplitRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SplitRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompactRegionRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompactRegionRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompactRegionResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompactRegionResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UUID_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UUID_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_WALEntry_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALKey_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALKey_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicateWALEntryRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicateWALEntryRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ReplicateWALEntryResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ReplicateWALEntryResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RollWALWriterRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RollWALWriterRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RollWALWriterResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RollWALWriterResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopServerRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_StopServerRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_StopServerResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_StopServerResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetServerInfoRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetServerInfoRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetServerInfoResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetServerInfoResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\021RegionAdmin.proto\032\013hbase.proto\"8\n\024GetR" + - "egionInfoRequest\022 \n\006region\030\001 \002(\0132\020.Regio" + - "nSpecifier\"8\n\025GetRegionInfoResponse\022\037\n\nr" + - "egionInfo\030\001 \002(\0132\013.RegionInfo\"Q\n\027GetStore" + - "FileListRequest\022 \n\006region\030\001 \002(\0132\020.Region" + - "Specifier\022\024\n\014columnFamily\030\002 \003(\014\"-\n\030GetSt" + - "oreFileListResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030" + - "\n\026GetOnlineRegionRequest\":\n\027GetOnlineReg" + - "ionResponse\022\037\n\nregionInfo\030\001 \003(\0132\013.Region" + - "Info\"S\n\021OpenRegionRequest\022 \n\006region\030\001 \003(", - "\0132\020.RegionSpecifier\022\034\n\024versionOfOfflineN" + - "ode\030\002 \001(\r\"\234\001\n\022OpenRegionResponse\022<\n\014open" + - "ingState\030\001 \003(\0162&.OpenRegionResponse.Regi" + - "onOpeningState\"H\n\022RegionOpeningState\022\n\n\006" + - 
"OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_O" + - "PENING\020\002\"r\n\022CloseRegionRequest\022 \n\006region" + - "\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024versionOfClo" + - "singNode\030\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004" + - "true\"%\n\023CloseRegionResponse\022\016\n\006closed\030\001 " + - "\002(\010\"M\n\022FlushRegionRequest\022 \n\006region\030\001 \002(", - "\0132\020.RegionSpecifier\022\025\n\rifOlderThanTs\030\002 \001" + - "(\004\"=\n\023FlushRegionResponse\022\025\n\rlastFlushTi" + - "me\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegion" + - "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + - "r\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023SplitRegionResp" + - "onse\"G\n\024CompactRegionRequest\022 \n\006region\030\001" + - " \002(\0132\020.RegionSpecifier\022\r\n\005major\030\002 \001(\010\"\027\n" + - "\025CompactRegionResponse\"1\n\004UUID\022\024\n\014leastS" + - "igBits\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004\"\301\003\n\010WA" + - "LEntry\022 \n\006walKey\030\001 \002(\0132\020.WALEntry.WALKey", - "\022\037\n\004edit\030\002 \002(\0132\021.WALEntry.WALEdit\032~\n\006WAL" + - "Key\022\031\n\021encodedRegionName\030\001 \002(\014\022\021\n\ttableN" + - "ame\030\002 \002(\014\022\031\n\021logSequenceNumber\030\003 \002(\004\022\021\n\t" + - "writeTime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUI" + - "D\032\361\001\n\007WALEdit\022\033\n\010keyValue\030\001 \003(\0132\t.KeyVal" + - "ue\0222\n\013familyScope\030\002 \003(\0132\035.WALEntry.WALEd" + - "it.FamilyScope\032M\n\013FamilyScope\022\016\n\006family\030" + - "\001 \002(\014\022.\n\tscopeType\030\002 \002(\0162\033.WALEntry.WALE" + - "dit.ScopeType\"F\n\tScopeType\022\033\n\027REPLICATIO" + - "N_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLO", - "BAL\020\001\"7\n\030ReplicateWALEntryRequest\022\033\n\010wal" + - "Entry\030\001 
\003(\0132\t.WALEntry\"\033\n\031ReplicateWALEn" + - "tryResponse\"\026\n\024RollWALWriterRequest\".\n\025R" + - "ollWALWriterResponse\022\025\n\rregionToFlush\030\001 " + - "\003(\014\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t" + - "\"\024\n\022StopServerResponse\"\026\n\024GetServerInfoR" + - "equest\"8\n\025GetServerInfoResponse\022\037\n\nserve" + - "rName\030\001 \002(\0132\013.ServerName2\213\006\n\022RegionAdmin" + - "Service\022>\n\rgetRegionInfo\022\025.GetRegionInfo" + - "Request\032\026.GetRegionInfoResponse\022G\n\020getSt", - "oreFileList\022\030.GetStoreFileListRequest\032\031." + - "GetStoreFileListResponse\022D\n\017getOnlineReg" + - "ion\022\027.GetOnlineRegionRequest\032\030.GetOnline" + - "RegionResponse\0225\n\nopenRegion\022\022.OpenRegio" + - "nRequest\032\023.OpenRegionResponse\0228\n\013closeRe" + - "gion\022\023.CloseRegionRequest\032\024.CloseRegionR" + - "esponse\0228\n\013flushRegion\022\023.FlushRegionRequ" + - "est\032\024.FlushRegionResponse\0228\n\013splitRegion" + - "\022\023.SplitRegionRequest\032\024.SplitRegionRespo" + - "nse\022>\n\rcompactRegion\022\025.CompactRegionRequ", - "est\032\026.CompactRegionResponse\022J\n\021replicate" + - "WALEntry\022\031.ReplicateWALEntryRequest\032\032.Re" + - "plicateWALEntryResponse\022>\n\rrollWALWriter" + - "\022\025.RollWALWriterRequest\032\026.RollWALWriterR" + - "esponse\022>\n\rgetServerInfo\022\025.GetServerInfo" + - "Request\032\026.GetServerInfoResponse\0225\n\nstopS" + - "erver\022\022.StopServerRequest\032\023.StopServerRe" + - "sponseBG\n*org.apache.hadoop.hbase.protob" + - "uf.generatedB\021RegionAdminProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - 
internal_static_GetRegionInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_GetRegionInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetRegionInfoRequest_descriptor, - new java.lang.String[] { "Region", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.Builder.class); - internal_static_GetRegionInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_GetRegionInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetRegionInfoResponse_descriptor, - new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.Builder.class); - internal_static_GetStoreFileListRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_GetStoreFileListRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetStoreFileListRequest_descriptor, - new java.lang.String[] { "Region", "ColumnFamily", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.Builder.class); - internal_static_GetStoreFileListResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_GetStoreFileListResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetStoreFileListResponse_descriptor, - new java.lang.String[] { "StoreFile", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.Builder.class); - internal_static_GetOnlineRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GetOnlineRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetOnlineRegionRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.Builder.class); - internal_static_GetOnlineRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_GetOnlineRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetOnlineRegionResponse_descriptor, - new java.lang.String[] { "RegionInfo", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.Builder.class); - internal_static_OpenRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_OpenRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OpenRegionRequest_descriptor, - new java.lang.String[] { "Region", "VersionOfOfflineNode", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.Builder.class); - internal_static_OpenRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_OpenRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_OpenRegionResponse_descriptor, - new java.lang.String[] { "OpeningState", }, - 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.Builder.class); - internal_static_CloseRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_CloseRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CloseRegionRequest_descriptor, - new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.Builder.class); - internal_static_CloseRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_CloseRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CloseRegionResponse_descriptor, - new java.lang.String[] { "Closed", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.Builder.class); - internal_static_FlushRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_FlushRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FlushRegionRequest_descriptor, - new java.lang.String[] { "Region", "IfOlderThanTs", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.Builder.class); - internal_static_FlushRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_FlushRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FlushRegionResponse_descriptor, - new 
java.lang.String[] { "LastFlushTime", "Flushed", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.Builder.class); - internal_static_SplitRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_SplitRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SplitRegionRequest_descriptor, - new java.lang.String[] { "Region", "SplitPoint", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.Builder.class); - internal_static_SplitRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_SplitRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SplitRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.Builder.class); - internal_static_CompactRegionRequest_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_CompactRegionRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CompactRegionRequest_descriptor, - new java.lang.String[] { "Region", "Major", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.Builder.class); - internal_static_CompactRegionResponse_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_CompactRegionResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_CompactRegionResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.Builder.class); - internal_static_UUID_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UUID_descriptor, - new java.lang.String[] { "LeastSigBits", "MostSigBits", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder.class); - internal_static_WALEntry_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_WALEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_descriptor, - new java.lang.String[] { "WalKey", "Edit", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder.class); - internal_static_WALEntry_WALKey_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALKey_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALKey_descriptor, - new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder.class); - internal_static_WALEntry_WALEdit_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(1); - internal_static_WALEntry_WALEdit_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_WALEntry_WALEdit_descriptor, - new java.lang.String[] { "KeyValue", "FamilyScope", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder.class); - internal_static_WALEntry_WALEdit_FamilyScope_descriptor = - internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALEdit_FamilyScope_descriptor, - new java.lang.String[] { "Family", "ScopeType", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); - internal_static_ReplicateWALEntryRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicateWALEntryRequest_descriptor, - new java.lang.String[] { "WalEntry", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.Builder.class); - internal_static_ReplicateWALEntryResponse_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ReplicateWALEntryResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.Builder.class); - internal_static_RollWALWriterRequest_descriptor = - 
getDescriptor().getMessageTypes().get(20); - internal_static_RollWALWriterRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RollWALWriterRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.Builder.class); - internal_static_RollWALWriterResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_RollWALWriterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RollWALWriterResponse_descriptor, - new java.lang.String[] { "RegionToFlush", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.Builder.class); - internal_static_StopServerRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_StopServerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopServerRequest_descriptor, - new java.lang.String[] { "Reason", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.Builder.class); - internal_static_StopServerResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_StopServerResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_StopServerResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.Builder.class); - internal_static_GetServerInfoRequest_descriptor = - 
getDescriptor().getMessageTypes().get(24); - internal_static_GetServerInfoRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetServerInfoRequest_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.Builder.class); - internal_static_GetServerInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_GetServerInfoResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetServerInfoResponse_descriptor, - new java.lang.String[] { "ServerName", }, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java deleted file mode 100644 index b36a9c0..0000000 --- src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java +++ /dev/null @@ -1,21773 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: RegionClient.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class RegionClientProtos { - private RegionClientProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ColumnOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // repeated bytes qualifier = 2; - java.util.List getQualifierList(); - int getQualifierCount(); - com.google.protobuf.ByteString getQualifier(int index); - } - public static final class Column extends - com.google.protobuf.GeneratedMessage - implements ColumnOrBuilder { - // Use Column.newBuilder() to construct. - private Column(Builder builder) { - super(builder); - } - private Column(boolean noInit) {} - - private static final Column defaultInstance; - public static Column getDefaultInstance() { - return defaultInstance; - } - - public Column getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // repeated bytes qualifier = 2; - public static final int QUALIFIER_FIELD_NUMBER = 2; - private java.util.List qualifier_; - public java.util.List - getQualifierList() { - return 
qualifier_; - } - public int getQualifierCount() { - return qualifier_.size(); - } - public com.google.protobuf.ByteString getQualifier(int index) { - return qualifier_.get(index); - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - for (int i = 0; i < qualifier_.size(); i++) { - output.writeBytes(2, qualifier_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - { - int dataSize = 0; - for (int i = 0; i < qualifier_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(qualifier_.get(i)); - } - size += dataSize; - size += 1 * getQualifierList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && getQualifierList() - .equals(other.getQualifierList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (getQualifierCount() > 0) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifierList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { 
return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - qualifier_ = java.util.Collections.unmodifiableList(qualifier_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)other); - } else { - super.mergeFrom(other); 
- return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (!other.qualifier_.isEmpty()) { - if (qualifier_.isEmpty()) { - qualifier_ = other.qualifier_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureQualifierIsMutable(); - qualifier_.addAll(other.qualifier_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - ensureQualifierIsMutable(); - qualifier_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new 
NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // repeated bytes qualifier = 2; - private java.util.List qualifier_ = java.util.Collections.emptyList();; - private void ensureQualifierIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - qualifier_ = new java.util.ArrayList(qualifier_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getQualifierList() { - return java.util.Collections.unmodifiableList(qualifier_); - } - public int getQualifierCount() { - return qualifier_.size(); - } - public com.google.protobuf.ByteString getQualifier(int index) { - return qualifier_.get(index); - } - public Builder setQualifier( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierIsMutable(); - qualifier_.set(index, value); - onChanged(); - return this; - } - public Builder addQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierIsMutable(); - qualifier_.add(value); - onChanged(); - return this; - } - public Builder addAllQualifier( - java.lang.Iterable values) { - ensureQualifierIsMutable(); - super.addAll(values, qualifier_); - onChanged(); - return this; - } - public Builder clearQualifier() { - qualifier_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Column) - } - - static { - defaultInstance = new Column(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Column) - } - - public interface AttributeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); 
- - // optional bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class Attribute extends - com.google.protobuf.GeneratedMessage - implements AttributeOrBuilder { - // Use Attribute.newBuilder() to construct. - private Attribute(Builder builder) { - super(builder); - } - private Attribute(boolean noInit) {} - - private static final Attribute defaultInstance; - public static Attribute getDefaultInstance() { - return defaultInstance; - } - - public Attribute getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString 
value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - name_ = ""; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other = 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDescriptor(); - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void 
setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // optional bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Attribute) - } - - static { - defaultInstance = new Attribute(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Attribute) - } - - public interface GetOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // repeated .Column column = 2; - java.util.List - getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); - int getColumnCount(); - java.util.List - getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index); - - // repeated .Attribute attribute = 3; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); - int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index); - - // optional uint64 lockId = 4; - boolean hasLockId(); - long getLockId(); - - // optional .Parameter filter = 5; - boolean 
hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); - - // optional .TimeRange timeRange = 6; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - - // optional uint32 maxVersions = 7 [default = 1]; - boolean hasMaxVersions(); - int getMaxVersions(); - - // optional bool cacheBlocks = 8 [default = true]; - boolean hasCacheBlocks(); - boolean getCacheBlocks(); - } - public static final class Get extends - com.google.protobuf.GeneratedMessage - implements GetOrBuilder { - // Use Get.newBuilder() to construct. - private Get(Builder builder) { - super(builder); - } - private Get(boolean noInit) {} - - private static final Get defaultInstance; - public static Get getDefaultInstance() { - return defaultInstance; - } - - public Get getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // repeated .Column column = 2; - public static final int COLUMN_FIELD_NUMBER = 2; - private java.util.List column_; - public java.util.List getColumnList() { - return 
column_; - } - public java.util.List - getColumnOrBuilderList() { - return column_; - } - public int getColumnCount() { - return column_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { - return column_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - return column_.get(index); - } - - // repeated .Attribute attribute = 3; - public static final int ATTRIBUTE_FIELD_NUMBER = 3; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - return attribute_.get(index); - } - - // optional uint64 lockId = 4; - public static final int LOCKID_FIELD_NUMBER = 4; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - - // optional .Parameter filter = 5; - public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { - return filter_; - } - - // optional .TimeRange timeRange = 6; - public static final int TIMERANGE_FIELD_NUMBER = 6; - private 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - public static final int MAXVERSIONS_FIELD_NUMBER = 7; - private int maxVersions_; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getMaxVersions() { - return maxVersions_; - } - - // optional bool cacheBlocks = 8 [default = true]; - public static final int CACHEBLOCKS_FIELD_NUMBER = 8; - private boolean cacheBlocks_; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - lockId_ = 0L; - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if 
(hasFilter()) { - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - for (int i = 0; i < column_.size(); i++) { - output.writeMessage(2, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(3, attribute_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(4, lockId_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeUInt32(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(8, cacheBlocks_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - for (int i = 0; i < column_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, attribute_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, lockId_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, cacheBlocks_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && getColumnList() - .equals(other.getColumnList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && (hasMaxVersions() == other.hasMaxVersions()); - if (hasMaxVersions()) { - result = result && (getMaxVersions() - == other.getMaxVersions()); - } - result = result && 
(hasCacheBlocks() == other.hasCacheBlocks()); - if (hasCacheBlocks()) { - result = result && (getCacheBlocks() - == other.getCacheBlocks()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (getColumnCount() > 0) { - hash = (37 * hash) + COLUMN_FIELD_NUMBER; - hash = (53 * hash) + getColumnList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - if (hasMaxVersions()) { - hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; - hash = (53 * hash) + getMaxVersions(); - } - if (hasCacheBlocks()) { - hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - 
.buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFieldBuilder(); - getAttributeFieldBuilder(); - getFilterFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); 
- } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - columnBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - attributeBuilder_.clear(); - } - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - maxVersions_ = 1; - bitField0_ = (bitField0_ & ~0x00000040); - cacheBlocks_ = true; - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (columnBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - column_ = java.util.Collections.unmodifiableList(column_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.column_ = column_; - } else { - result.column_ = columnBuilder_.build(); - } - if (attributeBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000002; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.maxVersions_ = maxVersions_; - if (((from_bitField0_ & 
0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - result.cacheBlocks_ = cacheBlocks_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (columnBuilder_ == null) { - if (!other.column_.isEmpty()) { - if (column_.isEmpty()) { - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureColumnIsMutable(); - column_.addAll(other.column_); - } - onChanged(); - } - } else { - if (!other.column_.isEmpty()) { - if (columnBuilder_.isEmpty()) { - columnBuilder_.dispose(); - columnBuilder_ = null; - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000002); - columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnFieldBuilder() : null; - } else { - columnBuilder_.addAllMessages(other.column_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000004); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - if (other.hasMaxVersions()) { - setMaxVersions(other.getMaxVersions()); - } - if (other.hasCacheBlocks()) { - setCacheBlocks(other.getCacheBlocks()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumn(subBuilder.buildPartial()); - break; - } - case 26: { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 32: { - bitField0_ |= 0x00000008; - lockId_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - maxVersions_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - cacheBlocks_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // repeated .Column column = 2; - private java.util.List 
column_ = - java.util.Collections.emptyList(); - private void ensureColumnIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - column_ = new java.util.ArrayList(column_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; - - public java.util.List getColumnList() { - if (columnBuilder_ == null) { - return java.util.Collections.unmodifiableList(column_); - } else { - return columnBuilder_.getMessageList(); - } - } - public int getColumnCount() { - if (columnBuilder_ == null) { - return column_.size(); - } else { - return columnBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { - if (columnBuilder_ == null) { - return column_.get(index); - } else { - return columnBuilder_.getMessage(index); - } - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.set(index, value); - onChanged(); - } else { - columnBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.set(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } 
- ensureColumnIsMutable(); - column_.add(value); - onChanged(); - } else { - columnBuilder_.addMessage(value); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(index, value); - onChanged(); - } else { - columnBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumn( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumn( - java.lang.Iterable values) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - super.addAll(values, column_); - onChanged(); - } else { - columnBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumn() { - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - columnBuilder_.clear(); - } - return this; - } - public Builder removeColumn(int index) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.remove(index); - onChanged(); - } else { - columnBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( - int index) { - return 
getColumnFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - if (columnBuilder_ == null) { - return column_.get(index); } else { - return columnBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnOrBuilderList() { - if (columnBuilder_ != null) { - return columnBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(column_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { - return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( - int index) { - return getColumnFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); - } - public java.util.List - getColumnBuilderList() { - return getColumnFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> - getColumnFieldBuilder() { - if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( - column_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - column_ = null; - } - return columnBuilder_; - } - - // repeated .Attribute attribute = 3; - private java.util.List attribute_ = 
- java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - attribute_ = new java.util.ArrayList(attribute_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - 
if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else 
{ - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( - int index) { - return getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( - attribute_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - attribute_ = null; - } - return attributeBuilder_; - } - - // optional uint64 lockId = 4; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000008; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000008); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional .Parameter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (filterBuilder_ == null) { - 
filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // optional .TimeRange timeRange = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020) && - timeRange_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000020; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - private int maxVersions_ = 1; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000040) == 
0x00000040); - } - public int getMaxVersions() { - return maxVersions_; - } - public Builder setMaxVersions(int value) { - bitField0_ |= 0x00000040; - maxVersions_ = value; - onChanged(); - return this; - } - public Builder clearMaxVersions() { - bitField0_ = (bitField0_ & ~0x00000040); - maxVersions_ = 1; - onChanged(); - return this; - } - - // optional bool cacheBlocks = 8 [default = true]; - private boolean cacheBlocks_ = true; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - public Builder setCacheBlocks(boolean value) { - bitField0_ |= 0x00000080; - cacheBlocks_ = value; - onChanged(); - return this; - } - public Builder clearCacheBlocks() { - bitField0_ = (bitField0_ & ~0x00000080); - cacheBlocks_ = true; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Get) - } - - static { - defaultInstance = new Get(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Get) - } - - public interface ResultOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .KeyValue value = 1; - java.util.List - getValueList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index); - int getValueCount(); - java.util.List - getValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index); - } - public static final class Result extends - com.google.protobuf.GeneratedMessage - implements ResultOrBuilder { - // Use Result.newBuilder() to construct. 
- private Result(Builder builder) { - super(builder); - } - private Result(boolean noInit) {} - - private static final Result defaultInstance; - public static Result getDefaultInstance() { - return defaultInstance; - } - - public Result getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; - } - - // repeated .KeyValue value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private java.util.List value_; - public java.util.List getValueList() { - return value_; - } - public java.util.List - getValueOrBuilderList() { - return value_; - } - public int getValueCount() { - return value_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { - return value_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index) { - return value_.get(index); - } - - private void initFields() { - value_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getValueCount(); i++) { - if (!getValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < value_.size(); i++) { - output.writeMessage(1, value_.get(i)); - } - 
getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < value_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, value_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) obj; - - boolean result = true; - result = result && getValueList() - .equals(other.getValueList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getValueCount() > 0) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValueList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result 
parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - 
getValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (valueBuilder_ == null) { - value_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - valueBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result(this); - int from_bitField0_ = bitField0_; - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - value_ = java.util.Collections.unmodifiableList(value_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.value_ = value_; - } 
else { - result.value_ = valueBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) return this; - if (valueBuilder_ == null) { - if (!other.value_.isEmpty()) { - if (value_.isEmpty()) { - value_ = other.value_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureValueIsMutable(); - value_.addAll(other.value_); - } - onChanged(); - } - } else { - if (!other.value_.isEmpty()) { - if (valueBuilder_.isEmpty()) { - valueBuilder_.dispose(); - valueBuilder_ = null; - value_ = other.value_; - bitField0_ = (bitField0_ & ~0x00000001); - valueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getValueFieldBuilder() : null; - } else { - valueBuilder_.addAllMessages(other.value_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getValueCount(); i++) { - if (!getValue(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addValue(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .KeyValue value = 1; - private java.util.List value_ = - java.util.Collections.emptyList(); - private void ensureValueIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - value_ = new java.util.ArrayList(value_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> valueBuilder_; - - public java.util.List getValueList() { - if (valueBuilder_ == null) { - return 
java.util.Collections.unmodifiableList(value_); - } else { - return valueBuilder_.getMessageList(); - } - } - public int getValueCount() { - if (valueBuilder_ == null) { - return value_.size(); - } else { - return valueBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { - if (valueBuilder_ == null) { - return value_.get(index); - } else { - return valueBuilder_.getMessage(index); - } - } - public Builder setValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - value_.set(index, value); - onChanged(); - } else { - valueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.set(index, builderForValue.build()); - onChanged(); - } else { - valueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - value_.add(value); - onChanged(); - } else { - valueBuilder_.addMessage(value); - } - return this; - } - public Builder addValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureValueIsMutable(); - value_.add(index, value); - onChanged(); - } else { - valueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - 
ensureValueIsMutable(); - value_.add(builderForValue.build()); - onChanged(); - } else { - valueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addValue( - int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.add(index, builderForValue.build()); - onChanged(); - } else { - valueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllValue( - java.lang.Iterable values) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - super.addAll(values, value_); - onChanged(); - } else { - valueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - valueBuilder_.clear(); - } - return this; - } - public Builder removeValue(int index) { - if (valueBuilder_ == null) { - ensureValueIsMutable(); - value_.remove(index); - onChanged(); - } else { - valueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getValueBuilder( - int index) { - return getValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( - int index) { - if (valueBuilder_ == null) { - return value_.get(index); } else { - return valueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getValueOrBuilderList() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(value_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder() { - return getValueFieldBuilder().addBuilder( - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder( - int index) { - return getValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); - } - public java.util.List - getValueBuilderList() { - return getValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( - value_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Result) - } - - static { - defaultInstance = new Result(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Result) - } - - public interface GetRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Get get = 2; - boolean hasGet(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder(); - - // optional bool 
closestRowBefore = 3; - boolean hasClosestRowBefore(); - boolean getClosestRowBefore(); - - // optional bool existenceOnly = 4; - boolean hasExistenceOnly(); - boolean getExistenceOnly(); - } - public static final class GetRequest extends - com.google.protobuf.GeneratedMessage - implements GetRequestOrBuilder { - // Use GetRequest.newBuilder() to construct. - private GetRequest(Builder builder) { - super(builder); - } - private GetRequest(boolean noInit) {} - - private static final GetRequest defaultInstance; - public static GetRequest getDefaultInstance() { - return defaultInstance; - } - - public GetRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Get get = 2; - public static final int GET_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { - return get_; 
- } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder() { - return get_; - } - - // optional bool closestRowBefore = 3; - public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; - private boolean closestRowBefore_; - public boolean hasClosestRowBefore() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getClosestRowBefore() { - return closestRowBefore_; - } - - // optional bool existenceOnly = 4; - public static final int EXISTENCEONLY_FIELD_NUMBER = 4; - private boolean existenceOnly_; - public boolean hasExistenceOnly() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getExistenceOnly() { - return existenceOnly_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); - closestRowBefore_ = false; - existenceOnly_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasGet()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getGet().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBool(3, closestRowBefore_); - } - if (((bitField0_ & 0x00000008) 
== 0x00000008)) { - output.writeBool(4, existenceOnly_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, get_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, closestRowBefore_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, existenceOnly_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasGet() == other.hasGet()); - if (hasGet()) { - result = result && getGet() - .equals(other.getGet()); - } - result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); - if (hasClosestRowBefore()) { - result = result && (getClosestRowBefore() - == 
other.getClosestRowBefore()); - } - result = result && (hasExistenceOnly() == other.hasExistenceOnly()); - if (hasExistenceOnly()) { - result = result && (getExistenceOnly() - == other.getExistenceOnly()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasGet()) { - hash = (37 * hash) + GET_FIELD_NUMBER; - hash = (53 * hash) + getGet().hashCode(); - } - if (hasClosestRowBefore()) { - hash = (37 * hash) + CLOSESTROWBEFORE_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getClosestRowBefore()); - } - if (hasExistenceOnly()) { - hash = (37 * hash) + EXISTENCEONLY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExistenceOnly()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static 
Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getGetFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (getBuilder_ == null) { - get_ = 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - closestRowBefore_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - existenceOnly_ = false; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = 
region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (getBuilder_ == null) { - result.get_ = get_; - } else { - result.get_ = getBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.closestRowBefore_ = closestRowBefore_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.existenceOnly_ = existenceOnly_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasGet()) { - mergeGet(other.getGet()); - } - if (other.hasClosestRowBefore()) { - setClosestRowBefore(other.getClosestRowBefore()); - } - if (other.hasExistenceOnly()) { - setExistenceOnly(other.getExistenceOnly()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasGet()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getGet().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(); - if (hasGet()) { - subBuilder.mergeFrom(getGet()); - } - input.readMessage(subBuilder, extensionRegistry); - setGet(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - closestRowBefore_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - existenceOnly_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 
0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Get get = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> getBuilder_; - public boolean hasGet() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { - if (getBuilder_ == null) { - return get_; - } else { - return getBuilder_.getMessage(); - } - } - public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { - if (getBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - get_ = value; - onChanged(); - } else { - getBuilder_.setMessage(value); - } - bitField0_ 
|= 0x00000002; - return this; - } - public Builder setGet( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder builderForValue) { - if (getBuilder_ == null) { - get_ = builderForValue.build(); - onChanged(); - } else { - getBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { - if (getBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - get_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) { - get_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); - } else { - get_ = value; - } - onChanged(); - } else { - getBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearGet() { - if (getBuilder_ == null) { - get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); - onChanged(); - } else { - getBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder getGetBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getGetFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder() { - if (getBuilder_ != null) { - return getBuilder_.getMessageOrBuilder(); - } else { - return get_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> - getGetFieldBuilder() { - if (getBuilder_ == null) { - getBuilder_ = new com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder>( - get_, - getParentForChildren(), - isClean()); - get_ = null; - } - return getBuilder_; - } - - // optional bool closestRowBefore = 3; - private boolean closestRowBefore_ ; - public boolean hasClosestRowBefore() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getClosestRowBefore() { - return closestRowBefore_; - } - public Builder setClosestRowBefore(boolean value) { - bitField0_ |= 0x00000004; - closestRowBefore_ = value; - onChanged(); - return this; - } - public Builder clearClosestRowBefore() { - bitField0_ = (bitField0_ & ~0x00000004); - closestRowBefore_ = false; - onChanged(); - return this; - } - - // optional bool existenceOnly = 4; - private boolean existenceOnly_ ; - public boolean hasExistenceOnly() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getExistenceOnly() { - return existenceOnly_; - } - public Builder setExistenceOnly(boolean value) { - bitField0_ |= 0x00000008; - existenceOnly_ = value; - onChanged(); - return this; - } - public Builder clearExistenceOnly() { - bitField0_ = (bitField0_ & ~0x00000008); - existenceOnly_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetRequest) - } - - static { - defaultInstance = new GetRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetRequest) - } - - public interface GetResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Result result = 1; - boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); - - // optional bool exists = 2; - boolean hasExists(); - boolean 
getExists(); - } - public static final class GetResponse extends - com.google.protobuf.GeneratedMessage - implements GetResponseOrBuilder { - // Use GetResponse.newBuilder() to construct. - private GetResponse(Builder builder) { - super(builder); - } - private GetResponse(boolean noInit) {} - - private static final GetResponse defaultInstance; - public static GetResponse getDefaultInstance() { - return defaultInstance; - } - - public GetResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { - return result_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; - } - - // optional bool exists = 2; - public static final int EXISTS_FIELD_NUMBER = 2; - private boolean exists_; - public boolean hasExists() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getExists() { - return exists_; - } - - private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - exists_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasResult()) { - if (!getResult().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, exists_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, exists_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) obj; - - boolean result = true; - result = result && (hasResult() == other.hasResult()); - if (hasResult()) { - result = result && getResult() - .equals(other.getResult()); - } - result = result && (hasExists() == other.hasExists()); - if (hasExists()) { - result = result 
&& (getExists() - == other.getExists()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResult()) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResult().hashCode(); - } - if (hasExists()) { - hash = (37 * hash) + EXISTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getExists()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - exists_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (resultBuilder_ == null) { - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.exists_ = exists_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()) return this; - if (other.hasResult()) { - mergeResult(other.getResult()); - } - if (other.hasExists()) { - setExists(other.getExists()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasResult()) { - if (!getResult().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - exists_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { - if (resultBuilder_ == null) { - return result_; - } else { - return resultBuilder_.getMessage(); - } - } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - result_ = value; - onChanged(); - } else { - resultBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - result_ = builderForValue.build(); - onChanged(); - } else { - resultBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { - result_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); - } else { - result_ = value; - } - onChanged(); - } else { - resultBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - onChanged(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder 
getResultBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getResultFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilder(); - } else { - return result_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( - result_, - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional bool exists = 2; - private boolean exists_ ; - public boolean hasExists() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getExists() { - return exists_; - } - public Builder setExists(boolean value) { - bitField0_ |= 0x00000002; - exists_ = value; - onChanged(); - return this; - } - public Builder clearExists() { - bitField0_ = (bitField0_ & ~0x00000002); - exists_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:GetResponse) - } - - static { - defaultInstance = new GetResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:GetResponse) - } - - public interface ConditionOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required bytes family = 2; - boolean hasFamily(); - 
com.google.protobuf.ByteString getFamily(); - - // required bytes qualifier = 3; - boolean hasQualifier(); - com.google.protobuf.ByteString getQualifier(); - - // required .Condition.CompareType compareType = 4; - boolean hasCompareType(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType(); - - // required .Condition.Comparator comparator = 5; - boolean hasComparator(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator(); - - // optional bytes value = 6; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - } - public static final class Condition extends - com.google.protobuf.GeneratedMessage - implements ConditionOrBuilder { - // Use Condition.newBuilder() to construct. - private Condition(Builder builder) { - super(builder); - } - private Condition(boolean noInit) {} - - private static final Condition defaultInstance; - public static Condition getDefaultInstance() { - return defaultInstance; - } - - public Condition getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; - } - - public enum CompareType - implements com.google.protobuf.ProtocolMessageEnum { - LESS(0, 0), - LESS_OR_EQUAL(1, 1), - EQUAL(2, 2), - NOT_EQUAL(3, 3), - GREATER_OR_EQUAL(4, 4), - GREATER(5, 5), - NO_OP(6, 6), - ; - - public static final int LESS_VALUE = 0; - public static final int LESS_OR_EQUAL_VALUE = 1; - public static final int EQUAL_VALUE = 2; - public static final int NOT_EQUAL_VALUE = 3; - public static final int GREATER_OR_EQUAL_VALUE = 4; - public 
static final int GREATER_VALUE = 5; - public static final int NO_OP_VALUE = 6; - - - public final int getNumber() { return value; } - - public static CompareType valueOf(int value) { - switch (value) { - case 0: return LESS; - case 1: return LESS_OR_EQUAL; - case 2: return EQUAL; - case 3: return NOT_EQUAL; - case 4: return GREATER_OR_EQUAL; - case 5: return GREATER; - case 6: return NO_OP; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public CompareType findValueByNumber(int number) { - return CompareType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(0); - } - - private static final CompareType[] VALUES = { - LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, - }; - - public static CompareType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private CompareType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Condition.CompareType) - } - - public enum Comparator - implements com.google.protobuf.ProtocolMessageEnum { - BINARY_COMPARATOR(0, 0), - 
BINARY_PREFIX_COMPARATOR(1, 1), - BIT_AND_COMPARATOR(2, 2), - BIT_OR_COMPARATOR(3, 3), - BIT_XOR_COMPARATOR(4, 4), - NULL_COMPARATOR(5, 5), - REGEX_STRING_COMPARATOR(6, 6), - SUBSTRING_COMPARATOR(7, 7), - ; - - public static final int BINARY_COMPARATOR_VALUE = 0; - public static final int BINARY_PREFIX_COMPARATOR_VALUE = 1; - public static final int BIT_AND_COMPARATOR_VALUE = 2; - public static final int BIT_OR_COMPARATOR_VALUE = 3; - public static final int BIT_XOR_COMPARATOR_VALUE = 4; - public static final int NULL_COMPARATOR_VALUE = 5; - public static final int REGEX_STRING_COMPARATOR_VALUE = 6; - public static final int SUBSTRING_COMPARATOR_VALUE = 7; - - - public final int getNumber() { return value; } - - public static Comparator valueOf(int value) { - switch (value) { - case 0: return BINARY_COMPARATOR; - case 1: return BINARY_PREFIX_COMPARATOR; - case 2: return BIT_AND_COMPARATOR; - case 3: return BIT_OR_COMPARATOR; - case 4: return BIT_XOR_COMPARATOR; - case 5: return NULL_COMPARATOR; - case 6: return REGEX_STRING_COMPARATOR; - case 7: return SUBSTRING_COMPARATOR; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Comparator findValueByNumber(int number) { - return Comparator.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(1); - } - - private static final Comparator[] VALUES = { - 
BINARY_COMPARATOR, BINARY_PREFIX_COMPARATOR, BIT_AND_COMPARATOR, BIT_OR_COMPARATOR, BIT_XOR_COMPARATOR, NULL_COMPARATOR, REGEX_STRING_COMPARATOR, SUBSTRING_COMPARATOR, - }; - - public static Comparator valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Comparator(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Condition.Comparator) - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required bytes family = 2; - public static final int FAMILY_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required bytes qualifier = 3; - public static final int QUALIFIER_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString qualifier_; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - // required .Condition.CompareType compareType = 4; - public static final int COMPARETYPE_FIELD_NUMBER = 4; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_; - public boolean hasCompareType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType 
getCompareType() { - return compareType_; - } - - // required .Condition.Comparator comparator = 5; - public static final int COMPARATOR_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_; - public boolean hasComparator() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { - return comparator_; - } - - // optional bytes value = 6; - public static final int VALUE_FIELD_NUMBER = 6; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - value_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasQualifier()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCompareType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasComparator()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 
0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, qualifier_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeEnum(4, compareType_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeEnum(5, comparator_.getNumber()); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBytes(6, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, family_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, qualifier_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(4, compareType_.getNumber()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(5, comparator_.getNumber()); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && (hasCompareType() == other.hasCompareType()); - if (hasCompareType()) { - result = result && - (getCompareType() == other.getCompareType()); - } - result = result && (hasComparator() == other.hasComparator()); - if (hasComparator()) { - result = result && - (getComparator() == other.getComparator()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - if (hasCompareType()) { - hash = (37 * hash) + COMPARETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getCompareType()); - } - if (hasComparator()) { - hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; - hash = (53 * 
hash) + hashEnum(getComparator()); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; - bitField0_ = (bitField0_ & ~0x00000008); - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - bitField0_ = (bitField0_ & ~0x00000010); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.qualifier_ = qualifier_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.compareType_ = compareType_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.comparator_ = comparator_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - 
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - if (other.hasCompareType()) { - setCompareType(other.getCompareType()); - } - if (other.hasComparator()) { - setComparator(other.getComparator()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasFamily()) { - - return false; - } - if (!hasQualifier()) { - - return false; - } - if (!hasCompareType()) { - - return false; - } - if (!hasComparator()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - 
bitField0_ |= 0x00000002; - family_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - qualifier_ = input.readBytes(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000008; - compareType_ = value; - } - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - comparator_ = value; - } - break; - } - case 50: { - bitField0_ |= 0x00000020; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required bytes family = 2; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder 
setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000002); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required bytes qualifier = 3; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000004); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // required .Condition.CompareType compareType = 4; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; - public boolean hasCompareType() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType() { - return compareType_; - } - public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - compareType_ = value; - onChanged(); - return this; - } - public Builder clearCompareType() { - bitField0_ = (bitField0_ & ~0x00000008); - compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; - onChanged(); - 
return this; - } - - // required .Condition.Comparator comparator = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - public boolean hasComparator() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { - return comparator_; - } - public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000010; - comparator_ = value; - onChanged(); - return this; - } - public Builder clearComparator() { - bitField0_ = (bitField0_ & ~0x00000010); - comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; - onChanged(); - return this; - } - - // optional bytes value = 6; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000020); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Condition) - } - - static { - defaultInstance = new Condition(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Condition) - } - - public interface MutateOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); 
- com.google.protobuf.ByteString getRow(); - - // required .Mutate.MutateType mutateType = 2; - boolean hasMutateType(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType(); - - // repeated .Mutate.ColumnValue columnValue = 3; - java.util.List - getColumnValueList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index); - int getColumnValueCount(); - java.util.List - getColumnValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index); - - // repeated .Attribute attribute = 4; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); - int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index); - - // optional uint64 timestamp = 5; - boolean hasTimestamp(); - long getTimestamp(); - - // optional uint64 lockId = 6; - boolean hasLockId(); - long getLockId(); - - // optional bool writeToWAL = 7 [default = true]; - boolean hasWriteToWAL(); - boolean getWriteToWAL(); - - // optional .TimeRange timeRange = 10; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - } - public static final class Mutate extends - com.google.protobuf.GeneratedMessage - implements MutateOrBuilder { - // Use Mutate.newBuilder() to construct. 
- private Mutate(Builder builder) { - super(builder); - } - private Mutate(boolean noInit) {} - - private static final Mutate defaultInstance; - public static Mutate getDefaultInstance() { - return defaultInstance; - } - - public Mutate getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; - } - - public enum MutateType - implements com.google.protobuf.ProtocolMessageEnum { - APPEND(0, 0), - INCREMENT(1, 1), - PUT(2, 2), - DELETE(3, 3), - DELETE_COLUMN(4, 4), - DELETE_FAMILY(5, 5), - ; - - public static final int APPEND_VALUE = 0; - public static final int INCREMENT_VALUE = 1; - public static final int PUT_VALUE = 2; - public static final int DELETE_VALUE = 3; - public static final int DELETE_COLUMN_VALUE = 4; - public static final int DELETE_FAMILY_VALUE = 5; - - - public final int getNumber() { return value; } - - public static MutateType valueOf(int value) { - switch (value) { - case 0: return APPEND; - case 1: return INCREMENT; - case 2: return PUT; - case 3: return DELETE; - case 4: return DELETE_COLUMN; - case 5: return DELETE_FAMILY; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public MutateType findValueByNumber(int number) { - return MutateType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); 
- } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); - } - - private static final MutateType[] VALUES = { - APPEND, INCREMENT, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, - }; - - public static MutateType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private MutateType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:Mutate.MutateType) - } - - public interface ColumnValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - java.util.List - getQualifierValueList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); - int getQualifierValueCount(); - java.util.List - getQualifierValueOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - int index); - - // optional uint64 timestamp = 3; - boolean hasTimestamp(); - long getTimestamp(); - } - public static final class ColumnValue extends - com.google.protobuf.GeneratedMessage - implements ColumnValueOrBuilder { - // Use ColumnValue.newBuilder() to construct. 
- private ColumnValue(Builder builder) { - super(builder); - } - private ColumnValue(boolean noInit) {} - - private static final ColumnValue defaultInstance; - public static ColumnValue getDefaultInstance() { - return defaultInstance; - } - - public ColumnValue getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; - } - - public interface QualifierValueOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes qualifier = 1; - boolean hasQualifier(); - com.google.protobuf.ByteString getQualifier(); - - // optional bytes value = 2; - boolean hasValue(); - com.google.protobuf.ByteString getValue(); - - // optional uint64 timestamp = 3; - boolean hasTimestamp(); - long getTimestamp(); - } - public static final class QualifierValue extends - com.google.protobuf.GeneratedMessage - implements QualifierValueOrBuilder { - // Use QualifierValue.newBuilder() to construct. 
- private QualifierValue(Builder builder) { - super(builder); - } - private QualifierValue(boolean noInit) {} - - private static final QualifierValue defaultInstance; - public static QualifierValue getDefaultInstance() { - return defaultInstance; - } - - public QualifierValue getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - } - - private int bitField0_; - // required bytes qualifier = 1; - public static final int QUALIFIER_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString qualifier_; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - // optional bytes value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - - private void initFields() { - qualifier_ = com.google.protobuf.ByteString.EMPTY; - value_ = com.google.protobuf.ByteString.EMPTY; - timestamp_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - 
if (isInitialized != -1) return isInitialized == 1; - - if (!hasQualifier()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, qualifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, value_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, timestamp_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, qualifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) obj; - - boolean result = true; - 
result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - 
} - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - value_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = new 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.qualifier_ = qualifier_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasQualifier()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - 
switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - qualifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes qualifier = 1; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasQualifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - qualifier_ = value; - onChanged(); - return this; - } - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // optional bytes value = 2; - private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getValue() { - return value_; - } - public Builder setValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - - // optional uint64 timestamp = 3; - private long timestamp_ ; - public boolean 
hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) - } - - static { - defaultInstance = new QualifierValue(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate.ColumnValue.QualifierValue) - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; - private java.util.List qualifierValue_; - public java.util.List getQualifierValueList() { - return qualifierValue_; - } - public java.util.List - getQualifierValueOrBuilderList() { - return qualifierValue_; - } - public int getQualifierValueCount() { - return qualifierValue_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { - return qualifierValue_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - int index) { - return qualifierValue_.get(index); - } - - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long 
getTimestamp() { - return timestamp_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifierValue_ = java.util.Collections.emptyList(); - timestamp_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getQualifierValueCount(); i++) { - if (!getQualifierValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - for (int i = 0; i < qualifierValue_.size(); i++) { - output.writeMessage(2, qualifierValue_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(3, timestamp_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - for (int i = 0; i < qualifierValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, qualifierValue_.get(i)); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return 
super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && getQualifierValueList() - .equals(other.getQualifierValueList()); - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (getQualifierValueCount() > 0) { - hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; - hash = (53 * hash) + getQualifierValueList().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent 
parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getQualifierValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (qualifierValueBuilder_ == null) { - qualifierValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - qualifierValueBuilder_.clear(); - } - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (qualifierValueBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.qualifierValue_ = qualifierValue_; - } else { - result.qualifierValue_ = qualifierValueBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.timestamp_ = timestamp_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (qualifierValueBuilder_ == null) { - if (!other.qualifierValue_.isEmpty()) { - if (qualifierValue_.isEmpty()) { - qualifierValue_ = other.qualifierValue_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureQualifierValueIsMutable(); - qualifierValue_.addAll(other.qualifierValue_); - } - onChanged(); - } - } else { - if 
(!other.qualifierValue_.isEmpty()) { - if (qualifierValueBuilder_.isEmpty()) { - qualifierValueBuilder_.dispose(); - qualifierValueBuilder_ = null; - qualifierValue_ = other.qualifierValue_; - bitField0_ = (bitField0_ & ~0x00000002); - qualifierValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getQualifierValueFieldBuilder() : null; - } else { - qualifierValueBuilder_.addAllMessages(other.qualifierValue_); - } - } - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - for (int i = 0; i < getQualifierValueCount(); i++) { - if (!getQualifierValue(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addQualifierValue(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - 
break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; - private java.util.List qualifierValue_ = - java.util.Collections.emptyList(); - private void ensureQualifierValueIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - qualifierValue_ = new java.util.ArrayList(qualifierValue_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; - - public java.util.List getQualifierValueList() { - if (qualifierValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(qualifierValue_); - } else { - return qualifierValueBuilder_.getMessageList(); - } - } - public int getQualifierValueCount() { - if (qualifierValueBuilder_ == null) { - return qualifierValue_.size(); - } else { - return qualifierValueBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { - if 
(qualifierValueBuilder_ == null) { - return qualifierValue_.get(index); - } else { - return qualifierValueBuilder_.getMessage(index); - } - } - public Builder setQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.set(index, value); - onChanged(); - } else { - qualifierValueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.set(index, builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.add(value); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(value); - } - return this; - } - public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { - if (qualifierValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureQualifierValueIsMutable(); - qualifierValue_.add(index, value); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addQualifierValue( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if 
(qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.add(builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addQualifierValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.add(index, builderForValue.build()); - onChanged(); - } else { - qualifierValueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllQualifierValue( - java.lang.Iterable values) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - super.addAll(values, qualifierValue_); - onChanged(); - } else { - qualifierValueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearQualifierValue() { - if (qualifierValueBuilder_ == null) { - qualifierValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - qualifierValueBuilder_.clear(); - } - return this; - } - public Builder removeQualifierValue(int index) { - if (qualifierValueBuilder_ == null) { - ensureQualifierValueIsMutable(); - qualifierValue_.remove(index); - onChanged(); - } else { - qualifierValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( - int index) { - return getQualifierValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( - int index) { - if (qualifierValueBuilder_ == null) { - return qualifierValue_.get(index); } else { - return qualifierValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - 
getQualifierValueOrBuilderList() { - if (qualifierValueBuilder_ != null) { - return qualifierValueBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(qualifierValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { - return getQualifierValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( - int index) { - return getQualifierValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); - } - public java.util.List - getQualifierValueBuilderList() { - return getQualifierValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> - getQualifierValueFieldBuilder() { - if (qualifierValueBuilder_ == null) { - qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( - qualifierValue_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - qualifierValue_ = null; - } - return qualifierValueBuilder_; - } - - // 
optional uint64 timestamp = 3; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) - } - - static { - defaultInstance = new ColumnValue(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate.ColumnValue) - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required .Mutate.MutateType mutateType = 2; - public static final int MUTATETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_; - public boolean hasMutateType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { - return mutateType_; - } - - // repeated .Mutate.ColumnValue columnValue = 3; - public static final int COLUMNVALUE_FIELD_NUMBER = 3; - private java.util.List columnValue_; - public java.util.List getColumnValueList() { - return columnValue_; - } - public java.util.List - getColumnValueOrBuilderList() { - return columnValue_; - } - public int getColumnValueCount() { - return columnValue_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { - return columnValue_.get(index); - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index) { - return columnValue_.get(index); - } - - // repeated .Attribute attribute = 4; - public static final int ATTRIBUTE_FIELD_NUMBER = 4; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - return attribute_.get(index); - } - - // optional uint64 timestamp = 5; - public static final int TIMESTAMP_FIELD_NUMBER = 5; - private long timestamp_; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getTimestamp() { - return timestamp_; - } - - // optional uint64 lockId = 6; - public static final int LOCKID_FIELD_NUMBER = 6; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getLockId() { - return lockId_; - } - - // optional bool writeToWAL = 7 [default = true]; - public static final int WRITETOWAL_FIELD_NUMBER = 7; - private boolean writeToWAL_; - public boolean hasWriteToWAL() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public boolean getWriteToWAL() { - return writeToWAL_; - } - - // optional .TimeRange timeRange = 10; - public static final int TIMERANGE_FIELD_NUMBER = 10; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - 
return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; - columnValue_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - timestamp_ = 0L; - lockId_ = 0L; - writeToWAL_ = true; - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMutateType()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getColumnValueCount(); i++) { - if (!getColumnValue(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, mutateType_.getNumber()); - } - for (int i = 0; i < columnValue_.size(); i++) { - output.writeMessage(3, columnValue_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(4, attribute_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(5, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(6, lockId_); - } - if (((bitField0_ & 0x00000010) 
== 0x00000010)) { - output.writeBool(7, writeToWAL_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeMessage(10, timeRange_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, mutateType_.getNumber()); - } - for (int i = 0; i < columnValue_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, columnValue_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, attribute_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(5, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(6, lockId_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(7, writeToWAL_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(10, timeRange_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasMutateType() == other.hasMutateType()); - if (hasMutateType()) { - result = result && - (getMutateType() == other.getMutateType()); - } - result = result && getColumnValueList() - .equals(other.getColumnValueList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasWriteToWAL() == other.hasWriteToWAL()); - if (hasWriteToWAL()) { - result = result && (getWriteToWAL() - == other.getWriteToWAL()); - } - result = result && (hasTimeRange() == other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasMutateType()) { - hash = (37 * hash) + MUTATETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getMutateType()); - } - if (getColumnValueCount() > 0) { - hash = (37 * hash) + COLUMNVALUE_FIELD_NUMBER; - hash = (53 * hash) + getColumnValueList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 
* hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasWriteToWAL()) { - hash = (37 * hash) + WRITETOWAL_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getWriteToWAL()); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(java.io.InputStream input) - throws java.io.IOException { - return 
newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder 
newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnValueFieldBuilder(); - getAttributeFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; - bitField0_ = (bitField0_ & ~0x00000002); - if (columnValueBuilder_ == null) { - columnValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - columnValueBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - attributeBuilder_.clear(); - } - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000010); - 
lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000020); - writeToWAL_ = true; - bitField0_ = (bitField0_ & ~0x00000040); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) 
== 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.mutateType_ = mutateType_; - if (columnValueBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - columnValue_ = java.util.Collections.unmodifiableList(columnValue_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.columnValue_ = columnValue_; - } else { - result.columnValue_ = columnValueBuilder_.build(); - } - if (attributeBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.writeToWAL_ = writeToWAL_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasMutateType()) { - 
setMutateType(other.getMutateType()); - } - if (columnValueBuilder_ == null) { - if (!other.columnValue_.isEmpty()) { - if (columnValue_.isEmpty()) { - columnValue_ = other.columnValue_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureColumnValueIsMutable(); - columnValue_.addAll(other.columnValue_); - } - onChanged(); - } - } else { - if (!other.columnValue_.isEmpty()) { - if (columnValueBuilder_.isEmpty()) { - columnValueBuilder_.dispose(); - columnValueBuilder_ = null; - columnValue_ = other.columnValue_; - bitField0_ = (bitField0_ & ~0x00000004); - columnValueBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnValueFieldBuilder() : null; - } else { - columnValueBuilder_.addAllMessages(other.columnValue_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000008); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasWriteToWAL()) { - setWriteToWAL(other.getWriteToWAL()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasMutateType()) { - - return false; - } - for (int i = 0; i < getColumnValueCount(); i++) { - if (!getColumnValue(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - mutateType_ = value; - } - break; - } - case 26: { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addColumnValue(subBuilder.buildPartial()); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 40: { - bitField0_ |= 0x00000010; - timestamp_ = input.readUInt64(); - break; - } - case 48: { - bitField0_ |= 0x00000020; - lockId_ = input.readUInt64(); - break; - } - case 56: { - bitField0_ |= 0x00000040; - writeToWAL_ = input.readBool(); - break; - } - case 82: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required .Mutate.MutateType mutateType = 2; - private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; - public boolean hasMutateType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { - return mutateType_; - } - public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - mutateType_ = value; - onChanged(); - return this; - } - public Builder clearMutateType() { - bitField0_ = (bitField0_ & ~0x00000002); - mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; - onChanged(); - return this; - } - - // repeated .Mutate.ColumnValue columnValue = 3; - private java.util.List columnValue_ = - java.util.Collections.emptyList(); - private void ensureColumnValueIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - columnValue_ = new java.util.ArrayList(columnValue_); - bitField0_ |= 0x00000004; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; - - public java.util.List getColumnValueList() { - if (columnValueBuilder_ == null) { - return java.util.Collections.unmodifiableList(columnValue_); - } else { - return columnValueBuilder_.getMessageList(); - } - } - public int getColumnValueCount() { - if (columnValueBuilder_ == null) { - return columnValue_.size(); - } else { - return columnValueBuilder_.getCount(); - } - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { - if (columnValueBuilder_ == null) { - return columnValue_.get(index); - } else { - return columnValueBuilder_.getMessage(index); - } - } - public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.set(index, value); - onChanged(); - } else { - columnValueBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.set(index, builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.add(value); - onChanged(); - } else { - columnValueBuilder_.addMessage(value); - } - return this; - } - public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { - if (columnValueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnValueIsMutable(); - columnValue_.add(index, value); - onChanged(); - } else { - columnValueBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumnValue( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - 
ensureColumnValueIsMutable(); - columnValue_.add(builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumnValue( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.add(index, builderForValue.build()); - onChanged(); - } else { - columnValueBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumnValue( - java.lang.Iterable values) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - super.addAll(values, columnValue_); - onChanged(); - } else { - columnValueBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumnValue() { - if (columnValueBuilder_ == null) { - columnValue_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - columnValueBuilder_.clear(); - } - return this; - } - public Builder removeColumnValue(int index) { - if (columnValueBuilder_ == null) { - ensureColumnValueIsMutable(); - columnValue_.remove(index); - onChanged(); - } else { - columnValueBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( - int index) { - return getColumnValueFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( - int index) { - if (columnValueBuilder_ == null) { - return columnValue_.get(index); } else { - return columnValueBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnValueOrBuilderList() { - if (columnValueBuilder_ != null) { - return columnValueBuilder_.getMessageOrBuilderList(); - } else { - return 
java.util.Collections.unmodifiableList(columnValue_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { - return getColumnValueFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( - int index) { - return getColumnValueFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); - } - public java.util.List - getColumnValueBuilderList() { - return getColumnValueFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> - getColumnValueFieldBuilder() { - if (columnValueBuilder_ == null) { - columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder>( - columnValue_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - columnValue_ = null; - } - return columnValueBuilder_; - } - - // repeated .Attribute attribute = 4; - private java.util.List attribute_ = - java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - attribute_ = new java.util.ArrayList(attribute_); - bitField0_ |= 0x00000008; - } - } - - private 
com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; 
- } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else { - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( - int index) { - return 
getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( - attribute_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - attribute_ = null; - } - return 
attributeBuilder_; - } - - // optional uint64 timestamp = 5; - private long timestamp_ ; - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public long getTimestamp() { - return timestamp_; - } - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000010; - timestamp_ = value; - onChanged(); - return this; - } - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000010); - timestamp_ = 0L; - onChanged(); - return this; - } - - // optional uint64 lockId = 6; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000020; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000020); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional bool writeToWAL = 7 [default = true]; - private boolean writeToWAL_ = true; - public boolean hasWriteToWAL() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public boolean getWriteToWAL() { - return writeToWAL_; - } - public Builder setWriteToWAL(boolean value) { - bitField0_ |= 0x00000040; - writeToWAL_ = value; - onChanged(); - return this; - } - public Builder clearWriteToWAL() { - bitField0_ = (bitField0_ & ~0x00000040); - writeToWAL_ = true; - onChanged(); - return this; - } - - // optional .TimeRange timeRange = 10; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return 
((bitField0_ & 0x00000080) == 0x00000080); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000080; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000080; - onChanged(); - return 
getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Mutate) - } - - static { - defaultInstance = new Mutate(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Mutate) - } - - public interface MutateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Mutate mutate = 2; - boolean hasMutate(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder(); - - // optional .Condition condition = 3; - boolean hasCondition(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition(); - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder(); - } - public static final class MutateRequest extends - com.google.protobuf.GeneratedMessage - implements MutateRequestOrBuilder { - // Use MutateRequest.newBuilder() to construct. - private MutateRequest(Builder builder) { - super(builder); - } - private MutateRequest(boolean noInit) {} - - private static final MutateRequest defaultInstance; - public static MutateRequest getDefaultInstance() { - return defaultInstance; - } - - public MutateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Mutate mutate = 2; - public static final int MUTATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { - return mutate_; - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { - return mutate_; - } - - // optional .Condition condition = 3; - public static final int CONDITION_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_; - public boolean hasCondition() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { - return condition_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { - return condition_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMutate()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getMutate().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasCondition()) { - if (!getCondition().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, mutate_); - } - if 
(((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(3, condition_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, mutate_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, condition_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasMutate() == other.hasMutate()); - if (hasMutate()) { - result = result && getMutate() - .equals(other.getMutate()); - } - result = result && (hasCondition() == other.hasCondition()); - if (hasCondition()) { - result = result && getCondition() - .equals(other.getCondition()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - 
} - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasMutate()) { - hash = (37 * hash) + MUTATE_FIELD_NUMBER; - hash = (53 * hash) + getMutate().hashCode(); - } - if (hasCondition()) { - hash = (37 * hash) + CONDITION_FIELD_NUMBER; - hash = (53 * hash) + getCondition().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getMutateFieldBuilder(); - getConditionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); - } else { - conditionBuilder_.clear(); - } - bitField0_ = 
(bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (mutateBuilder_ == null) { - result.mutate_ = mutate_; - } else { - result.mutate_ = 
mutateBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - if (conditionBuilder_ == null) { - result.condition_ = condition_; - } else { - result.condition_ = conditionBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasMutate()) { - mergeMutate(other.getMutate()); - } - if (other.hasCondition()) { - mergeCondition(other.getCondition()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasMutate()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getMutate().isInitialized()) { - - return false; - } - if (hasCondition()) { - if (!getCondition().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - 
default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(); - if (hasMutate()) { - subBuilder.mergeFrom(getMutate()); - } - input.readMessage(subBuilder, extensionRegistry); - setMutate(subBuilder.buildPartial()); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(); - if (hasCondition()) { - subBuilder.mergeFrom(getCondition()); - } - input.readMessage(subBuilder, extensionRegistry); - setCondition(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Mutate mutate = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> mutateBuilder_; - public boolean hasMutate() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { - if (mutateBuilder_ == null) { - return mutate_; - } else { - return mutateBuilder_.getMessage(); - } - } - public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - mutate_ = value; - onChanged(); - } 
else { - mutateBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setMutate( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder builderForValue) { - if (mutateBuilder_ == null) { - mutate_ = builderForValue.build(); - onChanged(); - } else { - mutateBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { - if (mutateBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - mutate_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) { - mutate_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); - } else { - mutate_ = value; - } - onChanged(); - } else { - mutateBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearMutate() { - if (mutateBuilder_ == null) { - mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); - onChanged(); - } else { - mutateBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder getMutateBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getMutateFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { - if (mutateBuilder_ != null) { - return mutateBuilder_.getMessageOrBuilder(); - } else { - return mutate_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> - 
getMutateFieldBuilder() { - if (mutateBuilder_ == null) { - mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder>( - mutate_, - getParentForChildren(), - isClean()); - mutate_ = null; - } - return mutateBuilder_; - } - - // optional .Condition condition = 3; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> conditionBuilder_; - public boolean hasCondition() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { - if (conditionBuilder_ == null) { - return condition_; - } else { - return conditionBuilder_.getMessage(); - } - } - public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { - if (conditionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - condition_ = value; - onChanged(); - } else { - conditionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder setCondition( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder builderForValue) { - if (conditionBuilder_ == null) { - condition_ = builderForValue.build(); - onChanged(); - } else { - conditionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder 
mergeCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { - if (conditionBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004) && - condition_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) { - condition_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); - } else { - condition_ = value; - } - onChanged(); - } else { - conditionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000004; - return this; - } - public Builder clearCondition() { - if (conditionBuilder_ == null) { - condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); - onChanged(); - } else { - conditionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder getConditionBuilder() { - bitField0_ |= 0x00000004; - onChanged(); - return getConditionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { - if (conditionBuilder_ != null) { - return conditionBuilder_.getMessageOrBuilder(); - } else { - return condition_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> - getConditionFieldBuilder() { - if (conditionBuilder_ == null) { - conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder>( - condition_, - getParentForChildren(), - isClean()); - condition_ = null; - } - return conditionBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MutateRequest) - } - - static { - defaultInstance = new MutateRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MutateRequest) - } - - public interface MutateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .Result result = 1; - boolean hasResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); - - // optional bool processed = 2; - boolean hasProcessed(); - boolean getProcessed(); - } - public static final class MutateResponse extends - com.google.protobuf.GeneratedMessage - implements MutateResponseOrBuilder { - // Use MutateResponse.newBuilder() to construct. 
- private MutateResponse(Builder builder) { - super(builder); - } - private MutateResponse(boolean noInit) {} - - private static final MutateResponse defaultInstance; - public static MutateResponse getDefaultInstance() { - return defaultInstance; - } - - public MutateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; - } - - private int bitField0_; - // optional .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { - return result_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { - return result_; - } - - // optional bool processed = 2; - public static final int PROCESSED_FIELD_NUMBER = 2; - private boolean processed_; - public boolean hasProcessed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getProcessed() { - return processed_; - } - - private void initFields() { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - processed_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasResult()) { - if (!getResult().isInitialized()) { - 
memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, processed_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, processed_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) obj; - - boolean result = true; - result = result && (hasResult() == other.hasResult()); - if (hasResult()) { - result = result && getResult() - .equals(other.getResult()); - } - result = result && (hasProcessed() == other.hasProcessed()); - if (hasProcessed()) { - result = result && (getProcessed() - == other.getProcessed()); - } - result = result && - 
getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResult()) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResult().hashCode(); - } - if (hasProcessed()) { - hash = (37 * hash) + PROCESSED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getProcessed()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse 
parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new 
Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - } else { - resultBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - processed_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (resultBuilder_ == null) { - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.processed_ = processed_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()) return this; - if (other.hasResult()) { - mergeResult(other.getResult()); - } - if (other.hasProcessed()) { - setProcessed(other.getProcessed()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasResult()) { - if (!getResult().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); - if (hasResult()) { - subBuilder.mergeFrom(getResult()); - } - input.readMessage(subBuilder, extensionRegistry); - setResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - processed_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional .Result result = 1; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; - public boolean hasResult() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { - if (resultBuilder_ == null) { - return result_; - } else { - return resultBuilder_.getMessage(); - } - } - public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - result_ = value; - onChanged(); - } else { - resultBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - result_ = builderForValue.build(); - onChanged(); - } else { - resultBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { - result_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); - } else { - result_ = value; - } - onChanged(); - } else { - resultBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); - onChanged(); - } else { - resultBuilder_.clear(); - } - 
bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getResultFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilder(); - } else { - return result_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( - result_, - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional bool processed = 2; - private boolean processed_ ; - public boolean hasProcessed() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getProcessed() { - return processed_; - } - public Builder setProcessed(boolean value) { - bitField0_ |= 0x00000002; - processed_ = value; - onChanged(); - return this; - } - public Builder clearProcessed() { - bitField0_ = (bitField0_ & ~0x00000002); - processed_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:MutateResponse) - } - - static { - defaultInstance = new MutateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MutateResponse) - } - - public interface ScanOrBuilder - extends 
com.google.protobuf.MessageOrBuilder { - - // repeated .Column column = 1; - java.util.List - getColumnList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); - int getColumnCount(); - java.util.List - getColumnOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index); - - // repeated .Attribute attribute = 2; - java.util.List - getAttributeList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); - int getAttributeCount(); - java.util.List - getAttributeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index); - - // optional bytes startRow = 3; - boolean hasStartRow(); - com.google.protobuf.ByteString getStartRow(); - - // optional bytes stopRow = 4; - boolean hasStopRow(); - com.google.protobuf.ByteString getStopRow(); - - // optional .Parameter filter = 5; - boolean hasFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); - - // optional .TimeRange timeRange = 6; - boolean hasTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); - - // optional uint32 maxVersions = 7 [default = 1]; - boolean hasMaxVersions(); - int getMaxVersions(); - - // optional bool cacheBlocks = 8 [default = true]; - boolean hasCacheBlocks(); - boolean getCacheBlocks(); - - // optional uint32 rowsToCache = 9; - boolean hasRowsToCache(); - int getRowsToCache(); - - // optional uint32 batchSize = 10; - boolean hasBatchSize(); - int getBatchSize(); - } - public static final class Scan extends - com.google.protobuf.GeneratedMessage - implements ScanOrBuilder { - // 
Use Scan.newBuilder() to construct. - private Scan(Builder builder) { - super(builder); - } - private Scan(boolean noInit) {} - - private static final Scan defaultInstance; - public static Scan getDefaultInstance() { - return defaultInstance; - } - - public Scan getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; - } - - private int bitField0_; - // repeated .Column column = 1; - public static final int COLUMN_FIELD_NUMBER = 1; - private java.util.List column_; - public java.util.List getColumnList() { - return column_; - } - public java.util.List - getColumnOrBuilderList() { - return column_; - } - public int getColumnCount() { - return column_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { - return column_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - return column_.get(index); - } - - // repeated .Attribute attribute = 2; - public static final int ATTRIBUTE_FIELD_NUMBER = 2; - private java.util.List attribute_; - public java.util.List getAttributeList() { - return attribute_; - } - public java.util.List - getAttributeOrBuilderList() { - return attribute_; - } - public int getAttributeCount() { - return attribute_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - return attribute_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - 
return attribute_.get(index); - } - - // optional bytes startRow = 3; - public static final int STARTROW_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString startRow_; - public boolean hasStartRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getStartRow() { - return startRow_; - } - - // optional bytes stopRow = 4; - public static final int STOPROW_FIELD_NUMBER = 4; - private com.google.protobuf.ByteString stopRow_; - public boolean hasStopRow() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getStopRow() { - return stopRow_; - } - - // optional .Parameter filter = 5; - public static final int FILTER_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; - public boolean hasFilter() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { - return filter_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { - return filter_; - } - - // optional .TimeRange timeRange = 6; - public static final int TIMERANGE_FIELD_NUMBER = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - return timeRange_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - return timeRange_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - public static final int MAXVERSIONS_FIELD_NUMBER = 7; - private int maxVersions_; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public int getMaxVersions() { - return maxVersions_; - } - - // optional 
bool cacheBlocks = 8 [default = true]; - public static final int CACHEBLOCKS_FIELD_NUMBER = 8; - private boolean cacheBlocks_; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - - // optional uint32 rowsToCache = 9; - public static final int ROWSTOCACHE_FIELD_NUMBER = 9; - private int rowsToCache_; - public boolean hasRowsToCache() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getRowsToCache() { - return rowsToCache_; - } - - // optional uint32 batchSize = 10; - public static final int BATCHSIZE_FIELD_NUMBER = 10; - private int batchSize_; - public boolean hasBatchSize() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public int getBatchSize() { - return batchSize_; - } - - private void initFields() { - column_ = java.util.Collections.emptyList(); - attribute_ = java.util.Collections.emptyList(); - startRow_ = com.google.protobuf.ByteString.EMPTY; - stopRow_ = com.google.protobuf.ByteString.EMPTY; - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - maxVersions_ = 1; - cacheBlocks_ = true; - rowsToCache_ = 0; - batchSize_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void 
writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < column_.size(); i++) { - output.writeMessage(1, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - output.writeMessage(2, attribute_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(3, startRow_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(4, stopRow_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeUInt32(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(8, cacheBlocks_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeUInt32(9, rowsToCache_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeUInt32(10, batchSize_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < column_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, column_.get(i)); - } - for (int i = 0; i < attribute_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, attribute_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, startRow_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, stopRow_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, filter_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(6, timeRange_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(7, maxVersions_); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, cacheBlocks_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(9, rowsToCache_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(10, batchSize_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) obj; - - boolean result = true; - result = result && getColumnList() - .equals(other.getColumnList()); - result = result && getAttributeList() - .equals(other.getAttributeList()); - result = result && (hasStartRow() == other.hasStartRow()); - if (hasStartRow()) { - result = result && getStartRow() - .equals(other.getStartRow()); - } - result = result && (hasStopRow() == other.hasStopRow()); - if (hasStopRow()) { - result = result && getStopRow() - .equals(other.getStopRow()); - } - result = result && (hasFilter() == other.hasFilter()); - if (hasFilter()) { - result = result && getFilter() - .equals(other.getFilter()); - } - result = result && (hasTimeRange() 
== other.hasTimeRange()); - if (hasTimeRange()) { - result = result && getTimeRange() - .equals(other.getTimeRange()); - } - result = result && (hasMaxVersions() == other.hasMaxVersions()); - if (hasMaxVersions()) { - result = result && (getMaxVersions() - == other.getMaxVersions()); - } - result = result && (hasCacheBlocks() == other.hasCacheBlocks()); - if (hasCacheBlocks()) { - result = result && (getCacheBlocks() - == other.getCacheBlocks()); - } - result = result && (hasRowsToCache() == other.hasRowsToCache()); - if (hasRowsToCache()) { - result = result && (getRowsToCache() - == other.getRowsToCache()); - } - result = result && (hasBatchSize() == other.hasBatchSize()); - if (hasBatchSize()) { - result = result && (getBatchSize() - == other.getBatchSize()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getColumnCount() > 0) { - hash = (37 * hash) + COLUMN_FIELD_NUMBER; - hash = (53 * hash) + getColumnList().hashCode(); - } - if (getAttributeCount() > 0) { - hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; - hash = (53 * hash) + getAttributeList().hashCode(); - } - if (hasStartRow()) { - hash = (37 * hash) + STARTROW_FIELD_NUMBER; - hash = (53 * hash) + getStartRow().hashCode(); - } - if (hasStopRow()) { - hash = (37 * hash) + STOPROW_FIELD_NUMBER; - hash = (53 * hash) + getStopRow().hashCode(); - } - if (hasFilter()) { - hash = (37 * hash) + FILTER_FIELD_NUMBER; - hash = (53 * hash) + getFilter().hashCode(); - } - if (hasTimeRange()) { - hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; - hash = (53 * hash) + getTimeRange().hashCode(); - } - if (hasMaxVersions()) { - hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; - hash = (53 * hash) + getMaxVersions(); - } - if (hasCacheBlocks()) { - hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCacheBlocks()); - 
} - if (hasRowsToCache()) { - hash = (37 * hash) + ROWSTOCACHE_FIELD_NUMBER; - hash = (53 * hash) + getRowsToCache(); - } - if (hasBatchSize()) { - hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; - hash = (53 * hash) + getBatchSize(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder { - public static final 
com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getColumnFieldBuilder(); - getAttributeFieldBuilder(); - getFilterFieldBuilder(); - getTimeRangeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - columnBuilder_.clear(); - } - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - attributeBuilder_.clear(); - } - startRow_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - stopRow_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - maxVersions_ = 
1; - bitField0_ = (bitField0_ & ~0x00000040); - cacheBlocks_ = true; - bitField0_ = (bitField0_ & ~0x00000080); - rowsToCache_ = 0; - bitField0_ = (bitField0_ & ~0x00000100); - batchSize_ = 0; - bitField0_ = (bitField0_ & ~0x00000200); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (columnBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - column_ = java.util.Collections.unmodifiableList(column_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.column_ = column_; - } else { - result.column_ = columnBuilder_.build(); - } - if 
(attributeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - attribute_ = java.util.Collections.unmodifiableList(attribute_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.attribute_ = attribute_; - } else { - result.attribute_ = attributeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000001; - } - result.startRow_ = startRow_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000002; - } - result.stopRow_ = stopRow_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000004; - } - if (filterBuilder_ == null) { - result.filter_ = filter_; - } else { - result.filter_ = filterBuilder_.build(); - } - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - if (timeRangeBuilder_ == null) { - result.timeRange_ = timeRange_; - } else { - result.timeRange_ = timeRangeBuilder_.build(); - } - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000010; - } - result.maxVersions_ = maxVersions_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 0x00000020; - } - result.cacheBlocks_ = cacheBlocks_; - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000040; - } - result.rowsToCache_ = rowsToCache_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000080; - } - result.batchSize_ = batchSize_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) return this; - if (columnBuilder_ == null) { - if (!other.column_.isEmpty()) { - if (column_.isEmpty()) { - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureColumnIsMutable(); - column_.addAll(other.column_); - } - onChanged(); - } - } else { - if (!other.column_.isEmpty()) { - if (columnBuilder_.isEmpty()) { - columnBuilder_.dispose(); - columnBuilder_ = null; - column_ = other.column_; - bitField0_ = (bitField0_ & ~0x00000001); - columnBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getColumnFieldBuilder() : null; - } else { - columnBuilder_.addAllMessages(other.column_); - } - } - } - if (attributeBuilder_ == null) { - if (!other.attribute_.isEmpty()) { - if (attribute_.isEmpty()) { - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureAttributeIsMutable(); - attribute_.addAll(other.attribute_); - } - onChanged(); - } - } else { - if (!other.attribute_.isEmpty()) { - if (attributeBuilder_.isEmpty()) { - attributeBuilder_.dispose(); - attributeBuilder_ = null; - attribute_ = other.attribute_; - bitField0_ = (bitField0_ & ~0x00000002); - attributeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getAttributeFieldBuilder() : null; - } else { - attributeBuilder_.addAllMessages(other.attribute_); - } - } - } - if (other.hasStartRow()) { - setStartRow(other.getStartRow()); - } - if (other.hasStopRow()) { - setStopRow(other.getStopRow()); - } - if (other.hasFilter()) { - mergeFilter(other.getFilter()); - } - if (other.hasTimeRange()) { - mergeTimeRange(other.getTimeRange()); - } - if (other.hasMaxVersions()) { - setMaxVersions(other.getMaxVersions()); - } - if (other.hasCacheBlocks()) { - setCacheBlocks(other.getCacheBlocks()); - } - if (other.hasRowsToCache()) { - setRowsToCache(other.getRowsToCache()); - } - if (other.hasBatchSize()) { - setBatchSize(other.getBatchSize()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getColumnCount(); i++) { - if (!getColumn(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getAttributeCount(); i++) { - if (!getAttribute(i).isInitialized()) { - - return false; - } - } - if (hasFilter()) { - if (!getFilter().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); - 
input.readMessage(subBuilder, extensionRegistry); - addColumn(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addAttribute(subBuilder.buildPartial()); - break; - } - case 26: { - bitField0_ |= 0x00000004; - startRow_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - stopRow_ = input.readBytes(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - if (hasFilter()) { - subBuilder.mergeFrom(getFilter()); - } - input.readMessage(subBuilder, extensionRegistry); - setFilter(subBuilder.buildPartial()); - break; - } - case 50: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); - if (hasTimeRange()) { - subBuilder.mergeFrom(getTimeRange()); - } - input.readMessage(subBuilder, extensionRegistry); - setTimeRange(subBuilder.buildPartial()); - break; - } - case 56: { - bitField0_ |= 0x00000040; - maxVersions_ = input.readUInt32(); - break; - } - case 64: { - bitField0_ |= 0x00000080; - cacheBlocks_ = input.readBool(); - break; - } - case 72: { - bitField0_ |= 0x00000100; - rowsToCache_ = input.readUInt32(); - break; - } - case 80: { - bitField0_ |= 0x00000200; - batchSize_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Column column = 1; - private java.util.List column_ = - java.util.Collections.emptyList(); - private void ensureColumnIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - column_ = new java.util.ArrayList(column_); - bitField0_ |= 0x00000001; - } - } - - private 
com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; - - public java.util.List getColumnList() { - if (columnBuilder_ == null) { - return java.util.Collections.unmodifiableList(column_); - } else { - return columnBuilder_.getMessageList(); - } - } - public int getColumnCount() { - if (columnBuilder_ == null) { - return column_.size(); - } else { - return columnBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { - if (columnBuilder_ == null) { - return column_.get(index); - } else { - return columnBuilder_.getMessage(index); - } - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.set(index, value); - onChanged(); - } else { - columnBuilder_.setMessage(index, value); - } - return this; - } - public Builder setColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.set(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(value); - onChanged(); - } else { - columnBuilder_.addMessage(value); - } - return this; - } - public Builder addColumn( - int index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { - if (columnBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureColumnIsMutable(); - column_.add(index, value); - onChanged(); - } else { - columnBuilder_.addMessage(index, value); - } - return this; - } - public Builder addColumn( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addColumn( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.add(index, builderForValue.build()); - onChanged(); - } else { - columnBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllColumn( - java.lang.Iterable values) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - super.addAll(values, column_); - onChanged(); - } else { - columnBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearColumn() { - if (columnBuilder_ == null) { - column_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - columnBuilder_.clear(); - } - return this; - } - public Builder removeColumn(int index) { - if (columnBuilder_ == null) { - ensureColumnIsMutable(); - column_.remove(index); - onChanged(); - } else { - columnBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( - int index) { - return getColumnFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( - int index) { - if 
(columnBuilder_ == null) { - return column_.get(index); } else { - return columnBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getColumnOrBuilderList() { - if (columnBuilder_ != null) { - return columnBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(column_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { - return getColumnFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( - int index) { - return getColumnFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); - } - public java.util.List - getColumnBuilderList() { - return getColumnFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> - getColumnFieldBuilder() { - if (columnBuilder_ == null) { - columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( - column_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - column_ = null; - } - return columnBuilder_; - } - - // repeated .Attribute attribute = 2; - private java.util.List attribute_ = - java.util.Collections.emptyList(); - private void ensureAttributeIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - attribute_ = new 
java.util.ArrayList(attribute_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; - - public java.util.List getAttributeList() { - if (attributeBuilder_ == null) { - return java.util.Collections.unmodifiableList(attribute_); - } else { - return attributeBuilder_.getMessageList(); - } - } - public int getAttributeCount() { - if (attributeBuilder_ == null) { - return attribute_.size(); - } else { - return attributeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); - } else { - return attributeBuilder_.getMessage(index); - } - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.set(index, value); - onChanged(); - } else { - attributeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.set(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(value); - 
onChanged(); - } else { - attributeBuilder_.addMessage(value); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { - if (attributeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureAttributeIsMutable(); - attribute_.add(index, value); - onChanged(); - } else { - attributeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addAttribute( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addAttribute( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.add(index, builderForValue.build()); - onChanged(); - } else { - attributeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllAttribute( - java.lang.Iterable values) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - super.addAll(values, attribute_); - onChanged(); - } else { - attributeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearAttribute() { - if (attributeBuilder_ == null) { - attribute_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - attributeBuilder_.clear(); - } - return this; - } - public Builder removeAttribute(int index) { - if (attributeBuilder_ == null) { - ensureAttributeIsMutable(); - attribute_.remove(index); - onChanged(); - } else { - attributeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder 
getAttributeBuilder( - int index) { - return getAttributeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( - int index) { - if (attributeBuilder_ == null) { - return attribute_.get(index); } else { - return attributeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getAttributeOrBuilderList() { - if (attributeBuilder_ != null) { - return attributeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(attribute_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { - return getAttributeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( - int index) { - return getAttributeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); - } - public java.util.List - getAttributeBuilderList() { - return getAttributeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> - getAttributeFieldBuilder() { - if (attributeBuilder_ == null) { - attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( - attribute_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - 
isClean()); - attribute_ = null; - } - return attributeBuilder_; - } - - // optional bytes startRow = 3; - private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStartRow() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getStartRow() { - return startRow_; - } - public Builder setStartRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - startRow_ = value; - onChanged(); - return this; - } - public Builder clearStartRow() { - bitField0_ = (bitField0_ & ~0x00000004); - startRow_ = getDefaultInstance().getStartRow(); - onChanged(); - return this; - } - - // optional bytes stopRow = 4; - private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasStopRow() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public com.google.protobuf.ByteString getStopRow() { - return stopRow_; - } - public Builder setStopRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - stopRow_ = value; - onChanged(); - return this; - } - public Builder clearStopRow() { - bitField0_ = (bitField0_ & ~0x00000008); - stopRow_ = getDefaultInstance().getStopRow(); - onChanged(); - return this; - } - - // optional .Parameter filter = 5; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; - public boolean hasFilter() { - return ((bitField0_ & 
0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { - if (filterBuilder_ == null) { - return filter_; - } else { - return filterBuilder_.getMessage(); - } - } - public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (filterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - filter_ = value; - onChanged(); - } else { - filterBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setFilter( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (filterBuilder_ == null) { - filter_ = builderForValue.build(); - onChanged(); - } else { - filterBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (filterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { - filter_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); - } else { - filter_ = value; - } - onChanged(); - } else { - filterBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearFilter() { - if (filterBuilder_ == null) { - filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - onChanged(); - } else { - filterBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getFilterFieldBuilder().getBuilder(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { - if (filterBuilder_ != null) { - return filterBuilder_.getMessageOrBuilder(); - } else { - return filter_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getFilterFieldBuilder() { - if (filterBuilder_ == null) { - filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - filter_, - getParentForChildren(), - isClean()); - filter_ = null; - } - return filterBuilder_; - } - - // optional .TimeRange timeRange = 6; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; - public boolean hasTimeRange() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { - if (timeRangeBuilder_ == null) { - return timeRange_; - } else { - return timeRangeBuilder_.getMessage(); - } - } - public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timeRange_ = value; - 
onChanged(); - } else { - timeRangeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder setTimeRange( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { - if (timeRangeBuilder_ == null) { - timeRange_ = builderForValue.build(); - onChanged(); - } else { - timeRangeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { - if (timeRangeBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020) && - timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { - timeRange_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); - } else { - timeRange_ = value; - } - onChanged(); - } else { - timeRangeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000020; - return this; - } - public Builder clearTimeRange() { - if (timeRangeBuilder_ == null) { - timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); - onChanged(); - } else { - timeRangeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { - bitField0_ |= 0x00000020; - onChanged(); - return getTimeRangeFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { - if (timeRangeBuilder_ != null) { - return timeRangeBuilder_.getMessageOrBuilder(); - } else { - return timeRange_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> - getTimeRangeFieldBuilder() { - if (timeRangeBuilder_ == null) { - timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( - timeRange_, - getParentForChildren(), - isClean()); - timeRange_ = null; - } - return timeRangeBuilder_; - } - - // optional uint32 maxVersions = 7 [default = 1]; - private int maxVersions_ = 1; - public boolean hasMaxVersions() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - public int getMaxVersions() { - return maxVersions_; - } - public Builder setMaxVersions(int value) { - bitField0_ |= 0x00000040; - maxVersions_ = value; - onChanged(); - return this; - } - public Builder clearMaxVersions() { - bitField0_ = (bitField0_ & ~0x00000040); - maxVersions_ = 1; - onChanged(); - return this; - } - - // optional bool cacheBlocks = 8 [default = true]; - private boolean cacheBlocks_ = true; - public boolean hasCacheBlocks() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - public boolean getCacheBlocks() { - return cacheBlocks_; - } - public Builder setCacheBlocks(boolean value) { - bitField0_ |= 0x00000080; - cacheBlocks_ = value; - onChanged(); - return this; - } - public Builder clearCacheBlocks() { - bitField0_ = (bitField0_ & ~0x00000080); - cacheBlocks_ = true; - onChanged(); - return this; - } - - // optional uint32 rowsToCache = 9; - private int rowsToCache_ ; - public boolean hasRowsToCache() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - public int getRowsToCache() { - return rowsToCache_; - } - public Builder setRowsToCache(int value) { - bitField0_ |= 0x00000100; - rowsToCache_ = value; - onChanged(); - return this; - } - public Builder clearRowsToCache() { - bitField0_ = (bitField0_ & ~0x00000100); - 
rowsToCache_ = 0; - onChanged(); - return this; - } - - // optional uint32 batchSize = 10; - private int batchSize_ ; - public boolean hasBatchSize() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - public int getBatchSize() { - return batchSize_; - } - public Builder setBatchSize(int value) { - bitField0_ |= 0x00000200; - batchSize_ = value; - onChanged(); - return this; - } - public Builder clearBatchSize() { - bitField0_ = (bitField0_ & ~0x00000200); - batchSize_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Scan) - } - - static { - defaultInstance = new Scan(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Scan) - } - - public interface ScanRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional uint64 scannerId = 1; - boolean hasScannerId(); - long getScannerId(); - - // optional .Scan scan = 2; - boolean hasScan(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder(); - - // optional uint32 numberOfRows = 3; - boolean hasNumberOfRows(); - int getNumberOfRows(); - - // optional bool closeScanner = 4; - boolean hasCloseScanner(); - boolean getCloseScanner(); - } - public static final class ScanRequest extends - com.google.protobuf.GeneratedMessage - implements ScanRequestOrBuilder { - // Use ScanRequest.newBuilder() to construct. 
- private ScanRequest(Builder builder) { - super(builder); - } - private ScanRequest(boolean noInit) {} - - private static final ScanRequest defaultInstance; - public static ScanRequest getDefaultInstance() { - return defaultInstance; - } - - public ScanRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; - } - - private int bitField0_; - // optional uint64 scannerId = 1; - public static final int SCANNERID_FIELD_NUMBER = 1; - private long scannerId_; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getScannerId() { - return scannerId_; - } - - // optional .Scan scan = 2; - public static final int SCAN_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { - return scan_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // optional uint32 numberOfRows = 3; - public static final int NUMBEROFROWS_FIELD_NUMBER = 3; - private int numberOfRows_; - public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getNumberOfRows() { - return numberOfRows_; - } - - // optional bool closeScanner = 4; - public static final int CLOSESCANNER_FIELD_NUMBER = 4; - private boolean closeScanner_; - public boolean hasCloseScanner() { - return ((bitField0_ & 
0x00000008) == 0x00000008); - } - public boolean getCloseScanner() { - return closeScanner_; - } - - private void initFields() { - scannerId_ = 0L; - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); - numberOfRows_ = 0; - closeScanner_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasScan()) { - if (!getScan().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt32(3, numberOfRows_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBool(4, closeScanner_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(3, numberOfRows_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, closeScanner_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - 
- private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) obj; - - boolean result = true; - result = result && (hasScannerId() == other.hasScannerId()); - if (hasScannerId()) { - result = result && (getScannerId() - == other.getScannerId()); - } - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasNumberOfRows() == other.hasNumberOfRows()); - if (hasNumberOfRows()) { - result = result && (getNumberOfRows() - == other.getNumberOfRows()); - } - result = result && (hasCloseScanner() == other.hasCloseScanner()); - if (hasCloseScanner()) { - result = result && (getCloseScanner() - == other.getCloseScanner()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasScannerId()) { - hash = (37 * hash) + SCANNERID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); - } - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasNumberOfRows()) { - hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; - hash = (53 * hash) + getNumberOfRows(); - } - if (hasCloseScanner()) { - hash = (37 * hash) + CLOSESCANNER_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getCloseScanner()); - } - hash = (29 * 
hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return 
builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - scannerId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - numberOfRows_ = 0; - bitField0_ = (bitField0_ & ~0x00000004); - closeScanner_ = false; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.scannerId_ = scannerId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.numberOfRows_ = numberOfRows_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.closeScanner_ = closeScanner_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance()) return this; - if (other.hasScannerId()) { - 
setScannerId(other.getScannerId()); - } - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasNumberOfRows()) { - setNumberOfRows(other.getNumberOfRows()); - } - if (other.hasCloseScanner()) { - setCloseScanner(other.getCloseScanner()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasScan()) { - if (!getScan().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - scannerId_ = input.readUInt64(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(); - if (hasScan()) { - subBuilder.mergeFrom(getScan()); - } - input.readMessage(subBuilder, extensionRegistry); - setScan(subBuilder.buildPartial()); - break; - } - case 24: { - bitField0_ |= 0x00000004; - numberOfRows_ = input.readUInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - closeScanner_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // optional uint64 scannerId = 1; - private long scannerId_ ; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getScannerId() { - return scannerId_; - } - public 
Builder setScannerId(long value) { - bitField0_ |= 0x00000001; - scannerId_ = value; - onChanged(); - return this; - } - public Builder clearScannerId() { - bitField0_ = (bitField0_ & ~0x00000001); - scannerId_ = 0L; - onChanged(); - return this; - } - - // optional .Scan scan = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> scanBuilder_; - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) { - scan_ = - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - } else { - return scan_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // optional uint32 numberOfRows = 3; - private int numberOfRows_ ; - public boolean hasNumberOfRows() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getNumberOfRows() { - return numberOfRows_; - } - public Builder setNumberOfRows(int value) { - bitField0_ |= 0x00000004; - numberOfRows_ = value; - onChanged(); - 
return this; - } - public Builder clearNumberOfRows() { - bitField0_ = (bitField0_ & ~0x00000004); - numberOfRows_ = 0; - onChanged(); - return this; - } - - // optional bool closeScanner = 4; - private boolean closeScanner_ ; - public boolean hasCloseScanner() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public boolean getCloseScanner() { - return closeScanner_; - } - public Builder setCloseScanner(boolean value) { - bitField0_ |= 0x00000008; - closeScanner_ = value; - onChanged(); - return this; - } - public Builder clearCloseScanner() { - bitField0_ = (bitField0_ & ~0x00000008); - closeScanner_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ScanRequest) - } - - static { - defaultInstance = new ScanRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ScanRequest) - } - - public interface ScanResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Result result = 1; - java.util.List - getResultList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index); - int getResultCount(); - java.util.List - getResultOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( - int index); - - // optional uint64 scannerId = 2; - boolean hasScannerId(); - long getScannerId(); - - // optional bool moreResults = 3; - boolean hasMoreResults(); - boolean getMoreResults(); - - // optional uint32 ttl = 4; - boolean hasTtl(); - int getTtl(); - } - public static final class ScanResponse extends - com.google.protobuf.GeneratedMessage - implements ScanResponseOrBuilder { - // Use ScanResponse.newBuilder() to construct. 
- private ScanResponse(Builder builder) { - super(builder); - } - private ScanResponse(boolean noInit) {} - - private static final ScanResponse defaultInstance; - public static ScanResponse getDefaultInstance() { - return defaultInstance; - } - - public ScanResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; - } - - private int bitField0_; - // repeated .Result result = 1; - public static final int RESULT_FIELD_NUMBER = 1; - private java.util.List result_; - public java.util.List getResultList() { - return result_; - } - public java.util.List - getResultOrBuilderList() { - return result_; - } - public int getResultCount() { - return result_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { - return result_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( - int index) { - return result_.get(index); - } - - // optional uint64 scannerId = 2; - public static final int SCANNERID_FIELD_NUMBER = 2; - private long scannerId_; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getScannerId() { - return scannerId_; - } - - // optional bool moreResults = 3; - public static final int MORERESULTS_FIELD_NUMBER = 3; - private boolean moreResults_; - public boolean hasMoreResults() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getMoreResults() { - return moreResults_; - } - - // optional uint32 ttl = 4; - public static final int 
TTL_FIELD_NUMBER = 4; - private int ttl_; - public boolean hasTtl() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public int getTtl() { - return ttl_; - } - - private void initFields() { - result_ = java.util.Collections.emptyList(); - scannerId_ = 0L; - moreResults_ = false; - ttl_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < result_.size(); i++) { - output.writeMessage(1, result_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(2, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(3, moreResults_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt32(4, ttl_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < result_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, result_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, scannerId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, moreResults_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, ttl_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = 
size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) obj; - - boolean result = true; - result = result && getResultList() - .equals(other.getResultList()); - result = result && (hasScannerId() == other.hasScannerId()); - if (hasScannerId()) { - result = result && (getScannerId() - == other.getScannerId()); - } - result = result && (hasMoreResults() == other.hasMoreResults()); - if (hasMoreResults()) { - result = result && (getMoreResults() - == other.getMoreResults()); - } - result = result && (hasTtl() == other.hasTtl()); - if (hasTtl()) { - result = result && (getTtl() - == other.getTtl()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); - } - if (hasScannerId()) { - hash = (37 * hash) + SCANNERID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getScannerId()); - } - if (hasMoreResults()) { - hash = (37 * hash) + MORERESULTS_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getMoreResults()); - } - if (hasTtl()) { - hash = (37 * hash) + TTL_FIELD_NUMBER; - hash = (53 * hash) + getTtl(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResultFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - resultBuilder_.clear(); - } - scannerId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - moreResults_ = false; - bitField0_ = (bitField0_ & ~0x00000004); - ttl_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (resultBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = java.util.Collections.unmodifiableList(result_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.result_ = result_; - } else { - result.result_ = resultBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.scannerId_ = scannerId_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - result.moreResults_ = moreResults_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000004; - } - result.ttl_ = ttl_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()) return this; - if (resultBuilder_ == null) { - if (!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; - bitField0_ = 
(bitField0_ & ~0x00000001); - } else { - ensureResultIsMutable(); - result_.addAll(other.result_); - } - onChanged(); - } - } else { - if (!other.result_.isEmpty()) { - if (resultBuilder_.isEmpty()) { - resultBuilder_.dispose(); - resultBuilder_ = null; - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - resultBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getResultFieldBuilder() : null; - } else { - resultBuilder_.addAllMessages(other.result_); - } - } - } - if (other.hasScannerId()) { - setScannerId(other.getScannerId()); - } - if (other.hasMoreResults()) { - setMoreResults(other.getMoreResults()); - } - if (other.hasTtl()) { - setTtl(other.getTtl()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getResultCount(); i++) { - if (!getResult(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResult(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - scannerId_ = input.readUInt64(); - break; - } - case 24: 
{ - bitField0_ |= 0x00000004; - moreResults_ = input.readBool(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - ttl_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Result result = 1; - private java.util.List result_ = - java.util.Collections.emptyList(); - private void ensureResultIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new java.util.ArrayList(result_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; - - public java.util.List getResultList() { - if (resultBuilder_ == null) { - return java.util.Collections.unmodifiableList(result_); - } else { - return resultBuilder_.getMessageList(); - } - } - public int getResultCount() { - if (resultBuilder_ == null) { - return result_.size(); - } else { - return resultBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { - if (resultBuilder_ == null) { - return result_.get(index); - } else { - return resultBuilder_.getMessage(index); - } - } - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.set(index, value); - onChanged(); - } else { - resultBuilder_.setMessage(index, value); - } - return this; - } - public Builder setResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.set(index, builderForValue.build()); - onChanged(); - } else { - 
resultBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - } else { - resultBuilder_.addMessage(value); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { - if (resultBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(index, value); - onChanged(); - } else { - resultBuilder_.addMessage(index, value); - } - return this; - } - public Builder addResult( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addResult( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - result_.add(index, builderForValue.build()); - onChanged(); - } else { - resultBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllResult( - java.lang.Iterable values) { - if (resultBuilder_ == null) { - ensureResultIsMutable(); - super.addAll(values, result_); - onChanged(); - } else { - resultBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearResult() { - if (resultBuilder_ == null) { - result_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - resultBuilder_.clear(); - } - return this; - } - public Builder removeResult(int index) { - if (resultBuilder_ == 
null) { - ensureResultIsMutable(); - result_.remove(index); - onChanged(); - } else { - resultBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder( - int index) { - return getResultFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( - int index) { - if (resultBuilder_ == null) { - return result_.get(index); } else { - return resultBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getResultOrBuilderList() { - if (resultBuilder_ != null) { - return resultBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(result_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder() { - return getResultFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder( - int index) { - return getResultFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); - } - public java.util.List - getResultBuilderList() { - return getResultFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> - getResultFieldBuilder() { - if (resultBuilder_ == null) { - resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( - result_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - result_ = null; - } - return resultBuilder_; - } - - // optional uint64 scannerId = 2; - private long scannerId_ ; - public boolean hasScannerId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getScannerId() { - return scannerId_; - } - public Builder setScannerId(long value) { - bitField0_ |= 0x00000002; - scannerId_ = value; - onChanged(); - return this; - } - public Builder clearScannerId() { - bitField0_ = (bitField0_ & ~0x00000002); - scannerId_ = 0L; - onChanged(); - return this; - } - - // optional bool moreResults = 3; - private boolean moreResults_ ; - public boolean hasMoreResults() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public boolean getMoreResults() { - return moreResults_; - } - public Builder setMoreResults(boolean value) { - bitField0_ |= 0x00000004; - moreResults_ = value; - onChanged(); - return this; - } - public Builder clearMoreResults() { - bitField0_ = (bitField0_ & ~0x00000004); - moreResults_ = false; - onChanged(); - return this; - } - - // optional uint32 ttl = 4; - private int ttl_ ; - public boolean hasTtl() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public int getTtl() { - return ttl_; - } - public Builder setTtl(int value) { - bitField0_ |= 0x00000008; - ttl_ = value; - onChanged(); - return this; - } - public Builder clearTtl() { - bitField0_ = (bitField0_ & ~0x00000008); - ttl_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ScanResponse) - } - - static { - defaultInstance = new ScanResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ScanResponse) - } - - public interface LockRowRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated bytes row = 2; - java.util.List getRowList(); - int getRowCount(); - com.google.protobuf.ByteString getRow(int index); - } - public static final class LockRowRequest extends - com.google.protobuf.GeneratedMessage - implements LockRowRequestOrBuilder { - // Use LockRowRequest.newBuilder() to construct. - private LockRowRequest(Builder builder) { - super(builder); - } - private LockRowRequest(boolean noInit) {} - - private static final LockRowRequest defaultInstance; - public static LockRowRequest getDefaultInstance() { - return defaultInstance; - } - - public LockRowRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated bytes row = 2; - public static final int ROW_FIELD_NUMBER = 2; - private java.util.List row_; - public java.util.List - getRowList() { - return row_; - } 
- public int getRowCount() { - return row_.size(); - } - public com.google.protobuf.ByteString getRow(int index) { - return row_.get(index); - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - row_ = java.util.Collections.emptyList();; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < row_.size(); i++) { - output.writeBytes(2, row_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - { - int dataSize = 0; - for (int i = 0; i < row_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(row_.get(i)); - } - size += dataSize; - size += 1 * getRowList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - 
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getRowList() - .equals(other.getRowList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getRowCount() > 0) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRowList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - 
regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - row_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) 
{ - row_ = java.util.Collections.unmodifiableList(row_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.row_ = row_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (!other.row_.isEmpty()) { - if (row_.isEmpty()) { - row_ = other.row_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureRowIsMutable(); - row_.addAll(other.row_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - ensureRowIsMutable(); - row_.add(input.readBytes()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder 
mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; 
- } - return regionBuilder_; - } - - // repeated bytes row = 2; - private java.util.List row_ = java.util.Collections.emptyList();; - private void ensureRowIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - row_ = new java.util.ArrayList(row_); - bitField0_ |= 0x00000002; - } - } - public java.util.List - getRowList() { - return java.util.Collections.unmodifiableList(row_); - } - public int getRowCount() { - return row_.size(); - } - public com.google.protobuf.ByteString getRow(int index) { - return row_.get(index); - } - public Builder setRow( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRowIsMutable(); - row_.set(index, value); - onChanged(); - return this; - } - public Builder addRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureRowIsMutable(); - row_.add(value); - onChanged(); - return this; - } - public Builder addAllRow( - java.lang.Iterable values) { - ensureRowIsMutable(); - super.addAll(values, row_); - onChanged(); - return this; - } - public Builder clearRow() { - row_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:LockRowRequest) - } - - static { - defaultInstance = new LockRowRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LockRowRequest) - } - - public interface LockRowResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 lockId = 1; - boolean hasLockId(); - long getLockId(); - - // optional uint32 ttl = 2; - boolean hasTtl(); - int getTtl(); - } - public static final class LockRowResponse extends - com.google.protobuf.GeneratedMessage - implements LockRowResponseOrBuilder { - // Use LockRowResponse.newBuilder() to construct. 
- private LockRowResponse(Builder builder) { - super(builder); - } - private LockRowResponse(boolean noInit) {} - - private static final LockRowResponse defaultInstance; - public static LockRowResponse getDefaultInstance() { - return defaultInstance; - } - - public LockRowResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 lockId = 1; - public static final int LOCKID_FIELD_NUMBER = 1; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLockId() { - return lockId_; - } - - // optional uint32 ttl = 2; - public static final int TTL_FIELD_NUMBER = 2; - private int ttl_; - public boolean hasTtl() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTtl() { - return ttl_; - } - - private void initFields() { - lockId_ = 0L; - ttl_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLockId()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, lockId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt32(2, ttl_); - } - getUnknownFields().writeTo(output); - } - - 
private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, lockId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, ttl_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) obj; - - boolean result = true; - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && (hasTtl() == other.hasTtl()); - if (hasTtl()) { - result = result && (getTtl() - == other.getTtl()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - if (hasTtl()) { - hash = (37 * hash) + TTL_FIELD_NUMBER; - hash = (53 * hash) + getTtl(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - 
public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - ttl_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.lockId_ = lockId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.ttl_ = ttl_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()) return this; - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - if (other.hasTtl()) { - setTtl(other.getTtl()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLockId()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - 
this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - lockId_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - ttl_ = input.readUInt32(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 lockId = 1; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000001; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000001); - lockId_ = 0L; - onChanged(); - return this; - } - - // optional uint32 ttl = 2; - private int ttl_ ; - public boolean hasTtl() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public int getTtl() { - return ttl_; - } - public Builder setTtl(int value) { - bitField0_ |= 0x00000002; - ttl_ = value; - onChanged(); - return this; - } - public Builder clearTtl() { - bitField0_ = (bitField0_ & ~0x00000002); - ttl_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:LockRowResponse) - } - - static { - defaultInstance = new LockRowResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:LockRowResponse) - } - - public interface UnlockRowRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required uint64 lockId = 2; - boolean hasLockId(); - long getLockId(); - } - public 
static final class UnlockRowRequest extends - com.google.protobuf.GeneratedMessage - implements UnlockRowRequestOrBuilder { - // Use UnlockRowRequest.newBuilder() to construct. - private UnlockRowRequest(Builder builder) { - super(builder); - } - private UnlockRowRequest(boolean noInit) {} - - private static final UnlockRowRequest defaultInstance; - public static UnlockRowRequest getDefaultInstance() { - return defaultInstance; - } - - public UnlockRowRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required uint64 lockId = 2; - public static final int LOCKID_FIELD_NUMBER = 2; - private long lockId_; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - lockId_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() 
{ - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasLockId()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, lockId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, lockId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - 
result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasLockId() == other.hasLockId()); - if (hasLockId()) { - result = result && (getLockId() - == other.getLockId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasLockId()) { - hash = (37 * hash) + LOCKID_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLockId()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - lockId_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public 
com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.lockId_ = lockId_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasLockId()) { - setLockId(other.getLockId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasLockId()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - lockId_ = input.readUInt64(); - break; - } - } 
- } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - 
onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required uint64 lockId = 2; - private long lockId_ ; - public boolean hasLockId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getLockId() { - return lockId_; - } - public Builder setLockId(long value) { - bitField0_ |= 0x00000002; - lockId_ = value; - onChanged(); - return this; - } - public Builder clearLockId() { - bitField0_ = (bitField0_ & ~0x00000002); - lockId_ = 0L; - 
onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:UnlockRowRequest) - } - - static { - defaultInstance = new UnlockRowRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnlockRowRequest) - } - - public interface UnlockRowResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - public static final class UnlockRowResponse extends - com.google.protobuf.GeneratedMessage - implements UnlockRowResponseOrBuilder { - // Use UnlockRowResponse.newBuilder() to construct. - private UnlockRowResponse(Builder builder) { - super(builder); - } - private UnlockRowResponse(boolean noInit) {} - - private static final UnlockRowResponse defaultInstance; - public static UnlockRowResponse getDefaultInstance() { - return defaultInstance; - } - - public UnlockRowResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - 
.buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()) return this; - 
this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - } - } - } - - - // @@protoc_insertion_point(builder_scope:UnlockRowResponse) - } - - static { - defaultInstance = new UnlockRowResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UnlockRowResponse) - } - - public interface BulkLoadHFileRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - java.util.List - getFamilyPathList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); - int getFamilyPathCount(); - java.util.List - getFamilyPathOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index); - } - public static final class BulkLoadHFileRequest extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileRequestOrBuilder { - // Use 
BulkLoadHFileRequest.newBuilder() to construct. - private BulkLoadHFileRequest(Builder builder) { - super(builder); - } - private BulkLoadHFileRequest(boolean noInit) {} - - private static final BulkLoadHFileRequest defaultInstance; - public static BulkLoadHFileRequest getDefaultInstance() { - return defaultInstance; - } - - public BulkLoadHFileRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; - } - - public interface FamilyPathOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required string path = 2; - boolean hasPath(); - String getPath(); - } - public static final class FamilyPath extends - com.google.protobuf.GeneratedMessage - implements FamilyPathOrBuilder { - // Use FamilyPath.newBuilder() to construct. 
- private FamilyPath(Builder builder) { - super(builder); - } - private FamilyPath(boolean noInit) {} - - private static final FamilyPath defaultInstance; - public static FamilyPath getDefaultInstance() { - return defaultInstance; - } - - public FamilyPath getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required string path = 2; - public static final int PATH_FIELD_NUMBER = 2; - private java.lang.Object path_; - public boolean hasPath() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getPath() { - java.lang.Object ref = path_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - path_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getPathBytes() { - java.lang.Object ref = path_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - path_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - family_ = 
com.google.protobuf.ByteString.EMPTY; - path_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasPath()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getPathBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getPathBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) obj; - - 
boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasPath() == other.hasPath()); - if (hasPath()) { - result = result && getPath() - .equals(other.getPath()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasPath()) { - hash = (37 * hash) + PATH_FIELD_NUMBER; - hash = (53 * hash) + getPath().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder 
newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - path_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } 
- - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.path_ = path_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasPath()) { - setPath(other.getPath()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - if (!hasPath()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - path_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 
0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required string path = 2; - private java.lang.Object path_ = ""; - public boolean hasPath() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getPath() { - java.lang.Object ref = path_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - path_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setPath(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - path_ = value; - onChanged(); - return this; - } - public Builder clearPath() { - bitField0_ = (bitField0_ & ~0x00000002); - path_ = getDefaultInstance().getPath(); - onChanged(); - return this; - } - void setPath(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - path_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) - } - - static { - defaultInstance = new FamilyPath(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - public static final int FAMILYPATH_FIELD_NUMBER = 2; - private java.util.List familyPath_; - public java.util.List getFamilyPathList() { - return familyPath_; - } - public java.util.List - getFamilyPathOrBuilderList() { - return familyPath_; - } - public int getFamilyPathCount() { - return familyPath_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - return familyPath_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - return familyPath_.get(index); - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - familyPath_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - for (int i = 0; i < familyPath_.size(); i++) { - output.writeMessage(2, familyPath_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - 
public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - for (int i = 0; i < familyPath_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, familyPath_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && getFamilyPathList() - .equals(other.getFamilyPathList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (getFamilyPathCount() > 0) { - hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; - hash = (53 * hash) + getFamilyPathList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - 
} else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getFamilyPathFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - familyPathBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (familyPathBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - familyPath_ = java.util.Collections.unmodifiableList(familyPath_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.familyPath_ = familyPath_; - } else { - result.familyPath_ = familyPathBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other) { - if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (familyPathBuilder_ == null) { - if (!other.familyPath_.isEmpty()) { - if (familyPath_.isEmpty()) { - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyPathIsMutable(); - familyPath_.addAll(other.familyPath_); - } - onChanged(); - } - } else { - if (!other.familyPath_.isEmpty()) { - if (familyPathBuilder_.isEmpty()) { - familyPathBuilder_.dispose(); - familyPathBuilder_ = null; - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000002); - familyPathBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getFamilyPathFieldBuilder() : null; - } else { - familyPathBuilder_.addAllMessages(other.familyPath_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyPath(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = 
builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; - private java.util.List familyPath_ = - java.util.Collections.emptyList(); - private void ensureFamilyPathIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyPath_ = new java.util.ArrayList(familyPath_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; - - public java.util.List getFamilyPathList() { - if (familyPathBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyPath_); - } else { - return familyPathBuilder_.getMessageList(); - } - } - public int getFamilyPathCount() { - if (familyPathBuilder_ == null) { - return familyPath_.size(); - } else { - return familyPathBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); - } else { - return familyPathBuilder_.getMessage(index); - } - } - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.set(index, value); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, value); - } - 
return this; - } - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.set(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(value); - onChanged(); - } else { - familyPathBuilder_.addMessage(value); - } - return this; - } - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(index, value); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFamilyPath( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder 
addAllFamilyPath( - java.lang.Iterable values) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - super.addAll(values, familyPath_); - onChanged(); - } else { - familyPathBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFamilyPath() { - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - familyPathBuilder_.clear(); - } - return this; - } - public Builder removeFamilyPath(int index) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.remove(index); - onChanged(); - } else { - familyPathBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); } else { - return familyPathBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFamilyPathOrBuilderList() { - if (familyPathBuilder_ != null) { - return familyPathBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyPath_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { - return getFamilyPathFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - public java.util.List - getFamilyPathBuilderList() { - return getFamilyPathFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> - getFamilyPathFieldBuilder() { - if (familyPathBuilder_ == null) { - familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( - familyPath_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - familyPath_ = null; - } - return familyPathBuilder_; - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) - } - - static { - defaultInstance = new BulkLoadHFileRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) - } - - public interface BulkLoadHFileResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool loaded = 1; - boolean hasLoaded(); - boolean getLoaded(); - } - public static final class BulkLoadHFileResponse extends - com.google.protobuf.GeneratedMessage - implements BulkLoadHFileResponseOrBuilder { - // Use BulkLoadHFileResponse.newBuilder() to construct. 
- private BulkLoadHFileResponse(Builder builder) { - super(builder); - } - private BulkLoadHFileResponse(boolean noInit) {} - - private static final BulkLoadHFileResponse defaultInstance; - public static BulkLoadHFileResponse getDefaultInstance() { - return defaultInstance; - } - - public BulkLoadHFileResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bool loaded = 1; - public static final int LOADED_FIELD_NUMBER = 1; - private boolean loaded_; - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLoaded() { - return loaded_; - } - - private void initFields() { - loaded_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLoaded()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, loaded_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, 
loaded_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) obj; - - boolean result = true; - result = result && (hasLoaded() == other.hasLoaded()); - if (hasLoaded()) { - result = result && (getLoaded() - == other.getLoaded()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLoaded()) { - hash = (37 * hash) + LOADED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoaded()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - loaded_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - 
result.loaded_ = loaded_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; - if (other.hasLoaded()) { - setLoaded(other.getLoaded()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLoaded()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - loaded_ = input.readBool(); - break; - } - } - } - } - - private int bitField0_; - - // required bool loaded = 1; - private boolean loaded_ ; - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getLoaded() { - return loaded_; - } - public Builder setLoaded(boolean value) { - bitField0_ |= 0x00000001; - loaded_ = 
value; - onChanged(); - return this; - } - public Builder clearLoaded() { - bitField0_ = (bitField0_ & ~0x00000001); - loaded_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) - } - - static { - defaultInstance = new BulkLoadHFileResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) - } - - public interface ParameterOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string type = 1; - boolean hasType(); - String getType(); - - // optional bytes binaryValue = 2; - boolean hasBinaryValue(); - com.google.protobuf.ByteString getBinaryValue(); - } - public static final class Parameter extends - com.google.protobuf.GeneratedMessage - implements ParameterOrBuilder { - // Use Parameter.newBuilder() to construct. - private Parameter(Builder builder) { - super(builder); - } - private Parameter(boolean noInit) {} - - private static final Parameter defaultInstance; - public static Parameter getDefaultInstance() { - return defaultInstance; - } - - public Parameter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; - } - - private int bitField0_; - // required string type = 1; - public static final int TYPE_FIELD_NUMBER = 1; - private java.lang.Object type_; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getType() { - java.lang.Object ref = type_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - 
(com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - type_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getTypeBytes() { - java.lang.Object ref = type_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - type_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes binaryValue = 2; - public static final int BINARYVALUE_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString binaryValue_; - public boolean hasBinaryValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getBinaryValue() { - return binaryValue_; - } - - private void initFields() { - type_ = ""; - binaryValue_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasType()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getTypeBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, binaryValue_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getTypeBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, 
binaryValue_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) obj; - - boolean result = true; - result = result && (hasType() == other.hasType()); - if (hasType()) { - result = result && getType() - .equals(other.getType()); - } - result = result && (hasBinaryValue() == other.hasBinaryValue()); - if (hasBinaryValue()) { - result = result && getBinaryValue() - .equals(other.getBinaryValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasType()) { - hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + getType().hashCode(); - } - if (hasBinaryValue()) { - hash = (37 * hash) + BINARYVALUE_FIELD_NUMBER; - hash = (53 * hash) + getBinaryValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - type_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - binaryValue_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - 
} - result.type_ = type_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.binaryValue_ = binaryValue_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) return this; - if (other.hasType()) { - setType(other.getType()); - } - if (other.hasBinaryValue()) { - setBinaryValue(other.getBinaryValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasType()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - type_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - binaryValue_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string type = 1; - private 
java.lang.Object type_ = ""; - public boolean hasType() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getType() { - java.lang.Object ref = type_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - type_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setType(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - type_ = value; - onChanged(); - return this; - } - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000001); - type_ = getDefaultInstance().getType(); - onChanged(); - return this; - } - void setType(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - type_ = value; - onChanged(); - } - - // optional bytes binaryValue = 2; - private com.google.protobuf.ByteString binaryValue_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasBinaryValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getBinaryValue() { - return binaryValue_; - } - public Builder setBinaryValue(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - binaryValue_ = value; - onChanged(); - return this; - } - public Builder clearBinaryValue() { - bitField0_ = (bitField0_ & ~0x00000002); - binaryValue_ = getDefaultInstance().getBinaryValue(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:Parameter) - } - - static { - defaultInstance = new Parameter(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Parameter) - } - - public interface PropertyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string name = 1; - boolean hasName(); - String getName(); - - // required string value = 2; - boolean hasValue(); - String getValue(); - } - public static final class Property extends - 
com.google.protobuf.GeneratedMessage - implements PropertyOrBuilder { - // Use Property.newBuilder() to construct. - private Property(Builder builder) { - super(builder); - } - private Property(boolean noInit) {} - - private static final Property defaultInstance; - public static Property getDefaultInstance() { - return defaultInstance; - } - - public Property getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; - } - - private int bitField0_; - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - name_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private java.lang.Object value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (ref instanceof 
String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - value_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - name_ = ""; - value_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getValueBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getValueBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected 
java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = 
(bitField0_ & ~0x00000001); - value_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - value_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required string name = 1; - private java.lang.Object name_ = ""; - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - name_ = s; - return s; - } 
else { - return (String) ref; - } - } - public Builder setName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - void setName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - } - - // required string value = 2; - private java.lang.Object value_ = ""; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getValue() { - java.lang.Object ref = value_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - value_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setValue(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - return this; - } - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000002); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - void setValue(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - value_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:Property) - } - - static { - defaultInstance = new Property(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Property) - } - - public interface ExecOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - boolean hasRow(); - com.google.protobuf.ByteString getRow(); - - // required string protocolName = 2; - boolean hasProtocolName(); - String getProtocolName(); - - // required string methodName = 3; - boolean hasMethodName(); - String getMethodName(); - - // repeated .Property property = 4; - java.util.List - getPropertyList(); - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index); - int getPropertyCount(); - java.util.List - getPropertyOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( - int index); - - // repeated .Parameter parameter = 5; - java.util.List - getParameterList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index); - int getParameterCount(); - java.util.List - getParameterOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( - int index); - } - public static final class Exec extends - com.google.protobuf.GeneratedMessage - implements ExecOrBuilder { - // Use Exec.newBuilder() to construct. - private Exec(Builder builder) { - super(builder); - } - private Exec(boolean noInit) {} - - private static final Exec defaultInstance; - public static Exec getDefaultInstance() { - return defaultInstance; - } - - public Exec getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required string protocolName = 2; - public static final int PROTOCOLNAME_FIELD_NUMBER = 2; - private java.lang.Object protocolName_; - public boolean hasProtocolName() 
{ - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocolName() { - java.lang.Object ref = protocolName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - protocolName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getProtocolNameBytes() { - java.lang.Object ref = protocolName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - protocolName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string methodName = 3; - public static final int METHODNAME_FIELD_NUMBER = 3; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // repeated .Property property = 4; - public static final int PROPERTY_FIELD_NUMBER = 4; - private java.util.List property_; - public java.util.List getPropertyList() { - return property_; - } - public java.util.List - getPropertyOrBuilderList() { - return property_; - } - public int getPropertyCount() { - return property_.size(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { - return property_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( - int index) { - return property_.get(index); - } - - // repeated .Parameter parameter = 5; - public static final int PARAMETER_FIELD_NUMBER = 5; - private java.util.List parameter_; - public java.util.List getParameterList() { - return parameter_; - } - public java.util.List - getParameterOrBuilderList() { - return parameter_; - } - public int getParameterCount() { - return parameter_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { - return parameter_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( - int index) { - return parameter_.get(index); - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - protocolName_ = ""; - methodName_ = ""; - property_ = java.util.Collections.emptyList(); - parameter_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasProtocolName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getPropertyCount(); i++) { - if (!getProperty(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getParameterCount(); i++) { - if (!getParameter(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws 
java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getProtocolNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getMethodNameBytes()); - } - for (int i = 0; i < property_.size(); i++) { - output.writeMessage(4, property_.get(i)); - } - for (int i = 0; i < parameter_.size(); i++) { - output.writeMessage(5, parameter_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getProtocolNameBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getMethodNameBytes()); - } - for (int i = 0; i < property_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, property_.get(i)); - } - for (int i = 0; i < parameter_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, parameter_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasProtocolName() == other.hasProtocolName()); - if (hasProtocolName()) { - result = result && getProtocolName() - .equals(other.getProtocolName()); - } - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && getPropertyList() - .equals(other.getPropertyList()); - result = result && getParameterList() - .equals(other.getParameterList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasProtocolName()) { - hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER; - hash = (53 * hash) + getProtocolName().hashCode(); - } - if (hasMethodName()) { - hash = (37 * hash) + METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (getPropertyCount() > 0) { - hash = (37 * hash) + PROPERTY_FIELD_NUMBER; - hash = (53 * hash) + getPropertyList().hashCode(); - } - if (getParameterCount() > 0) { - hash = (37 * hash) + PARAMETER_FIELD_NUMBER; - hash = (53 * hash) + getParameterList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public 
static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - 
return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - 
maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPropertyFieldBuilder(); - getParameterFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - protocolName_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - methodName_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - if (propertyBuilder_ == null) { - property_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - propertyBuilder_.clear(); - } - if (parameterBuilder_ == null) { - parameter_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - } else { - parameterBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.protocolName_ = protocolName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.methodName_ = methodName_; - if (propertyBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - property_ = java.util.Collections.unmodifiableList(property_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.property_ = property_; - } else { - result.property_ = propertyBuilder_.build(); - } - if (parameterBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010)) { - parameter_ = java.util.Collections.unmodifiableList(parameter_); - bitField0_ = (bitField0_ & ~0x00000010); - } - result.parameter_ = parameter_; - } else { - result.parameter_ = parameterBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - 
} - if (other.hasProtocolName()) { - setProtocolName(other.getProtocolName()); - } - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (propertyBuilder_ == null) { - if (!other.property_.isEmpty()) { - if (property_.isEmpty()) { - property_ = other.property_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensurePropertyIsMutable(); - property_.addAll(other.property_); - } - onChanged(); - } - } else { - if (!other.property_.isEmpty()) { - if (propertyBuilder_.isEmpty()) { - propertyBuilder_.dispose(); - propertyBuilder_ = null; - property_ = other.property_; - bitField0_ = (bitField0_ & ~0x00000008); - propertyBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getPropertyFieldBuilder() : null; - } else { - propertyBuilder_.addAllMessages(other.property_); - } - } - } - if (parameterBuilder_ == null) { - if (!other.parameter_.isEmpty()) { - if (parameter_.isEmpty()) { - parameter_ = other.parameter_; - bitField0_ = (bitField0_ & ~0x00000010); - } else { - ensureParameterIsMutable(); - parameter_.addAll(other.parameter_); - } - onChanged(); - } - } else { - if (!other.parameter_.isEmpty()) { - if (parameterBuilder_.isEmpty()) { - parameterBuilder_.dispose(); - parameterBuilder_ = null; - parameter_ = other.parameter_; - bitField0_ = (bitField0_ & ~0x00000010); - parameterBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getParameterFieldBuilder() : null; - } else { - parameterBuilder_.addAllMessages(other.parameter_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasProtocolName()) { - - return false; - } - if (!hasMethodName()) { - - return false; - } - for (int i = 0; i < getPropertyCount(); i++) { - if (!getProperty(i).isInitialized()) { - - return false; - } - } - for (int i = 0; i < getParameterCount(); i++) { - if (!getParameter(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - protocolName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - methodName_ = input.readBytes(); - break; - } - case 34: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addProperty(subBuilder.buildPartial()); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addParameter(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRow() { - return row_; - } - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required string protocolName = 2; - private java.lang.Object protocolName_ = ""; - public boolean hasProtocolName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public String getProtocolName() { - java.lang.Object ref = protocolName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - protocolName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setProtocolName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - protocolName_ = value; - onChanged(); - return this; - } - public Builder clearProtocolName() { - bitField0_ = (bitField0_ & ~0x00000002); - protocolName_ = getDefaultInstance().getProtocolName(); - onChanged(); - return this; - } - void setProtocolName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000002; - protocolName_ = value; - onChanged(); - } - - // required string methodName = 3; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String 
getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000004); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - methodName_ = value; - onChanged(); - } - - // repeated .Property property = 4; - private java.util.List property_ = - java.util.Collections.emptyList(); - private void ensurePropertyIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - property_ = new java.util.ArrayList(property_); - bitField0_ |= 0x00000008; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> propertyBuilder_; - - public java.util.List getPropertyList() { - if (propertyBuilder_ == null) { - return java.util.Collections.unmodifiableList(property_); - } else { - return propertyBuilder_.getMessageList(); - } - } - public int getPropertyCount() { - if (propertyBuilder_ == null) { - return property_.size(); - } else { - return propertyBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { - if (propertyBuilder_ == null) { - return property_.get(index); - } else { - return propertyBuilder_.getMessage(index); - } - } - public Builder setProperty( - int index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.set(index, value); - onChanged(); - } else { - propertyBuilder_.setMessage(index, value); - } - return this; - } - public Builder setProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.set(index, builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.add(value); - onChanged(); - } else { - propertyBuilder_.addMessage(value); - } - return this; - } - public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { - if (propertyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensurePropertyIsMutable(); - property_.add(index, value); - onChanged(); - } else { - propertyBuilder_.addMessage(index, value); - } - return this; - } - public Builder addProperty( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.add(builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addProperty( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.add(index, 
builderForValue.build()); - onChanged(); - } else { - propertyBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllProperty( - java.lang.Iterable values) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - super.addAll(values, property_); - onChanged(); - } else { - propertyBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearProperty() { - if (propertyBuilder_ == null) { - property_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - propertyBuilder_.clear(); - } - return this; - } - public Builder removeProperty(int index) { - if (propertyBuilder_ == null) { - ensurePropertyIsMutable(); - property_.remove(index); - onChanged(); - } else { - propertyBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder getPropertyBuilder( - int index) { - return getPropertyFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( - int index) { - if (propertyBuilder_ == null) { - return property_.get(index); } else { - return propertyBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getPropertyOrBuilderList() { - if (propertyBuilder_ != null) { - return propertyBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(property_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder() { - return getPropertyFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder( - int index) { - return getPropertyFieldBuilder().addBuilder( - index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); - } - public java.util.List - getPropertyBuilderList() { - return getPropertyFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> - getPropertyFieldBuilder() { - if (propertyBuilder_ == null) { - propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder>( - property_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - property_ = null; - } - return propertyBuilder_; - } - - // repeated .Parameter parameter = 5; - private java.util.List parameter_ = - java.util.Collections.emptyList(); - private void ensureParameterIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - parameter_ = new java.util.ArrayList(parameter_); - bitField0_ |= 0x00000010; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> parameterBuilder_; - - public java.util.List getParameterList() { - if (parameterBuilder_ == null) { - return java.util.Collections.unmodifiableList(parameter_); - } else { - return parameterBuilder_.getMessageList(); - } - } - public int getParameterCount() { - if (parameterBuilder_ == null) { - return parameter_.size(); - } else { - return parameterBuilder_.getCount(); - } - } - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { - if (parameterBuilder_ == null) { - return parameter_.get(index); - } else { - return parameterBuilder_.getMessage(index); - } - } - public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.set(index, value); - onChanged(); - } else { - parameterBuilder_.setMessage(index, value); - } - return this; - } - public Builder setParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.set(index, builderForValue.build()); - onChanged(); - } else { - parameterBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.add(value); - onChanged(); - } else { - parameterBuilder_.addMessage(value); - } - return this; - } - public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (parameterBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureParameterIsMutable(); - parameter_.add(index, value); - onChanged(); - } else { - parameterBuilder_.addMessage(index, value); - } - return this; - } - public Builder addParameter( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.add(builderForValue.build()); - onChanged(); - } else { - 
parameterBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addParameter( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.add(index, builderForValue.build()); - onChanged(); - } else { - parameterBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllParameter( - java.lang.Iterable values) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - super.addAll(values, parameter_); - onChanged(); - } else { - parameterBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearParameter() { - if (parameterBuilder_ == null) { - parameter_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - onChanged(); - } else { - parameterBuilder_.clear(); - } - return this; - } - public Builder removeParameter(int index) { - if (parameterBuilder_ == null) { - ensureParameterIsMutable(); - parameter_.remove(index); - onChanged(); - } else { - parameterBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getParameterBuilder( - int index) { - return getParameterFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( - int index) { - if (parameterBuilder_ == null) { - return parameter_.get(index); } else { - return parameterBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getParameterOrBuilderList() { - if (parameterBuilder_ != null) { - return parameterBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(parameter_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder() { - return 
getParameterFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder( - int index) { - return getParameterFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public java.util.List - getParameterBuilderList() { - return getParameterFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getParameterFieldBuilder() { - if (parameterBuilder_ == null) { - parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - parameter_, - ((bitField0_ & 0x00000010) == 0x00000010), - getParentForChildren(), - isClean()); - parameter_ = null; - } - return parameterBuilder_; - } - - // @@protoc_insertion_point(builder_scope:Exec) - } - - static { - defaultInstance = new Exec(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:Exec) - } - - public interface ExecCoprocessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .RegionSpecifier region = 1; - boolean hasRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); - - // required .Exec call = 2; - boolean hasCall(); - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder(); - } - public static final class ExecCoprocessorRequest extends - com.google.protobuf.GeneratedMessage - implements ExecCoprocessorRequestOrBuilder { - // Use ExecCoprocessorRequest.newBuilder() to construct. - private ExecCoprocessorRequest(Builder builder) { - super(builder); - } - private ExecCoprocessorRequest(boolean noInit) {} - - private static final ExecCoprocessorRequest defaultInstance; - public static ExecCoprocessorRequest getDefaultInstance() { - return defaultInstance; - } - - public ExecCoprocessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; - } - - private int bitField0_; - // required .RegionSpecifier region = 1; - public static final int REGION_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - return region_; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - return region_; - } - - // required .Exec call = 2; - public static final int CALL_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 
0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { - return call_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { - return call_; - } - - private void initFields() { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegion()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCall()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRegion().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getCall().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, call_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, region_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, call_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - 
protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) obj; - - boolean result = true; - result = result && (hasRegion() == other.hasRegion()); - if (hasRegion()) { - result = result && getRegion() - .equals(other.getRegion()); - } - result = result && (hasCall() == other.hasCall()); - if (hasCall()) { - result = result && getCall() - .equals(other.getCall()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegion()) { - hash = (37 * hash) + REGION_FIELD_NUMBER; - hash = (53 * hash) + getRegion().hashCode(); - } - if (hasCall()) { - hash = (37 * hash) + CALL_FIELD_NUMBER; - hash = (53 * hash) + getCall().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) 
- .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRegionFieldBuilder(); - getCallFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); - } else { - callBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; 
- } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (regionBuilder_ == null) { - result.region_ = region_; - } else { - result.region_ = regionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (callBuilder_ == null) { - result.call_ = call_; - } else { - result.call_ = callBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; - if (other.hasRegion()) { - mergeRegion(other.getRegion()); - } - if (other.hasCall()) { - mergeCall(other.getCall()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegion()) { - - return false; - } - if (!hasCall()) { - - return false; - } - if (!getRegion().isInitialized()) { - - return false; - } - if (!getCall().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); - if (hasRegion()) { - subBuilder.mergeFrom(getRegion()); - } - input.readMessage(subBuilder, extensionRegistry); - setRegion(subBuilder.buildPartial()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(); - if (hasCall()) { - subBuilder.mergeFrom(getCall()); - } - input.readMessage(subBuilder, extensionRegistry); - setCall(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required .RegionSpecifier region = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; - public boolean hasRegion() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { - if (regionBuilder_ == null) { - return region_; - } else { - return regionBuilder_.getMessage(); - } - } - public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - region_ = value; - onChanged(); - } else { - regionBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder setRegion( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { - if (regionBuilder_ == null) { - region_ = builderForValue.build(); - onChanged(); - } else { - regionBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { - if (regionBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { - region_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); - } else { - region_ = value; - } - onChanged(); - } else { - regionBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - public Builder clearRegion() { - if (regionBuilder_ == null) { - region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - onChanged(); - } else { - regionBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRegionFieldBuilder().getBuilder(); - } - public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { - if (regionBuilder_ != null) { - return regionBuilder_.getMessageOrBuilder(); - } else { - return region_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> - getRegionFieldBuilder() { - if (regionBuilder_ == null) { - regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - region_, - getParentForChildren(), - isClean()); - region_ = null; - } - return regionBuilder_; - } - - // required .Exec call = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> callBuilder_; - public boolean hasCall() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { - if (callBuilder_ == null) { - return call_; - } else { - return callBuilder_.getMessage(); - } - } - public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { - if (callBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - call_ = value; - onChanged(); - } else { - 
callBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setCall( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder builderForValue) { - if (callBuilder_ == null) { - call_ = builderForValue.build(); - onChanged(); - } else { - callBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { - if (callBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - call_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) { - call_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); - } else { - call_ = value; - } - onChanged(); - } else { - callBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearCall() { - if (callBuilder_ == null) { - call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); - onChanged(); - } else { - callBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder getCallBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getCallFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { - if (callBuilder_ != null) { - return callBuilder_.getMessageOrBuilder(); - } else { - return call_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> - getCallFieldBuilder() { - if (callBuilder_ == null) { - callBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder>( - call_, - getParentForChildren(), - isClean()); - call_ = null; - } - return callBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest) - } - - static { - defaultInstance = new ExecCoprocessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest) - } - - public interface ExecCoprocessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes regionName = 1; - boolean hasRegionName(); - com.google.protobuf.ByteString getRegionName(); - - // required .Parameter value = 2; - boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder(); - } - public static final class ExecCoprocessorResponse extends - com.google.protobuf.GeneratedMessage - implements ExecCoprocessorResponseOrBuilder { - // Use ExecCoprocessorResponse.newBuilder() to construct. 
- private ExecCoprocessorResponse(Builder builder) { - super(builder); - } - private ExecCoprocessorResponse(boolean noInit) {} - - private static final ExecCoprocessorResponse defaultInstance; - public static ExecCoprocessorResponse getDefaultInstance() { - return defaultInstance; - } - - public ExecCoprocessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; - } - - private int bitField0_; - // required bytes regionName = 1; - public static final int REGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString regionName_; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - - // required .Parameter value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { - return value_; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { - return value_; - } - - private void initFields() { - regionName_ = com.google.protobuf.ByteString.EMPTY; - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = 
memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - if (!getValue().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, regionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, value_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, regionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, value_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) obj; - - boolean result = true; - result = result && (hasRegionName() == other.hasRegionName()); - if 
(hasRegionName()) { - result = result && getRegionName() - .equals(other.getRegionName()); - } - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRegionName()) { - hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getRegionName().hashCode(); - } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getValueFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - regionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return 
create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.regionName_ = regionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - 
return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; - if (other.hasRegionName()) { - setRegionName(other.getRegionName()); - } - if (other.hasValue()) { - mergeValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRegionName()) { - - return false; - } - if (!hasValue()) { - - return false; - } - if (!getValue().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - regionName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - if (hasValue()) { 
- subBuilder.mergeFrom(getValue()); - } - input.readMessage(subBuilder, extensionRegistry); - setValue(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes regionName = 1; - private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getRegionName() { - return regionName_; - } - public Builder setRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - regionName_ = value; - onChanged(); - return this; - } - public Builder clearRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - regionName_ = getDefaultInstance().getRegionName(); - onChanged(); - return this; - } - - // required .Parameter value = 2; - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> valueBuilder_; - public boolean hasValue() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { - if (valueBuilder_ == null) { - return value_; - } else { - return valueBuilder_.getMessage(); - } - } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; 
- return this; - } - public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (valueBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - value_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { - value_ = - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); - onChanged(); - } else { - valueBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getValueBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = 
new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - value_, - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse) - } - - static { - defaultInstance = new ExecCoprocessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) - } - - public interface MultiRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Parameter request = 1; - java.util.List - getRequestList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index); - int getRequestCount(); - java.util.List - getRequestOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( - int index); - - // optional bool atomic = 2; - boolean hasAtomic(); - boolean getAtomic(); - } - public static final class MultiRequest extends - com.google.protobuf.GeneratedMessage - implements MultiRequestOrBuilder { - // Use MultiRequest.newBuilder() to construct. 
- private MultiRequest(Builder builder) { - super(builder); - } - private MultiRequest(boolean noInit) {} - - private static final MultiRequest defaultInstance; - public static MultiRequest getDefaultInstance() { - return defaultInstance; - } - - public MultiRequest getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; - } - - private int bitField0_; - // repeated .Parameter request = 1; - public static final int REQUEST_FIELD_NUMBER = 1; - private java.util.List request_; - public java.util.List getRequestList() { - return request_; - } - public java.util.List - getRequestOrBuilderList() { - return request_; - } - public int getRequestCount() { - return request_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { - return request_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( - int index) { - return request_.get(index); - } - - // optional bool atomic = 2; - public static final int ATOMIC_FIELD_NUMBER = 2; - private boolean atomic_; - public boolean hasAtomic() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public boolean getAtomic() { - return atomic_; - } - - private void initFields() { - request_ = java.util.Collections.emptyList(); - atomic_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getRequestCount(); 
i++) { - if (!getRequest(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < request_.size(); i++) { - output.writeMessage(1, request_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(2, atomic_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < request_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, request_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, atomic_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) obj; - - boolean result = true; - result = result && getRequestList() - .equals(other.getRequestList()); - result = result && (hasAtomic() == other.hasAtomic()); - if (hasAtomic()) { - result = result && (getAtomic() - == other.getAtomic()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - 
@java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getRequestCount() > 0) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequestList().hashCode(); - } - if (hasAtomic()) { - hash = (37 * hash) + ATOMIC_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAtomic()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - 
com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRequestFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (requestBuilder_ == null) { - request_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - requestBuilder_.clear(); - } - atomic_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest build() { - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (requestBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - request_ = java.util.Collections.unmodifiableList(request_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.request_ = request_; - } else { - result.request_ = requestBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.atomic_ = atomic_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance()) return this; - 
if (requestBuilder_ == null) { - if (!other.request_.isEmpty()) { - if (request_.isEmpty()) { - request_ = other.request_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureRequestIsMutable(); - request_.addAll(other.request_); - } - onChanged(); - } - } else { - if (!other.request_.isEmpty()) { - if (requestBuilder_.isEmpty()) { - requestBuilder_.dispose(); - requestBuilder_ = null; - request_ = other.request_; - bitField0_ = (bitField0_ & ~0x00000001); - requestBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getRequestFieldBuilder() : null; - } else { - requestBuilder_.addAllMessages(other.request_); - } - } - } - if (other.hasAtomic()) { - setAtomic(other.getAtomic()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getRequestCount(); i++) { - if (!getRequest(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addRequest(subBuilder.buildPartial()); - break; - } - case 16: { - bitField0_ |= 0x00000002; - atomic_ = input.readBool(); - break; 
- } - } - } - } - - private int bitField0_; - - // repeated .Parameter request = 1; - private java.util.List request_ = - java.util.Collections.emptyList(); - private void ensureRequestIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - request_ = new java.util.ArrayList(request_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> requestBuilder_; - - public java.util.List getRequestList() { - if (requestBuilder_ == null) { - return java.util.Collections.unmodifiableList(request_); - } else { - return requestBuilder_.getMessageList(); - } - } - public int getRequestCount() { - if (requestBuilder_ == null) { - return request_.size(); - } else { - return requestBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { - if (requestBuilder_ == null) { - return request_.get(index); - } else { - return requestBuilder_.getMessage(index); - } - } - public Builder setRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRequestIsMutable(); - request_.set(index, value); - onChanged(); - } else { - requestBuilder_.setMessage(index, value); - } - return this; - } - public Builder setRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.set(index, builderForValue.build()); - onChanged(); - } else { - requestBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder 
addRequest(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRequestIsMutable(); - request_.add(value); - onChanged(); - } else { - requestBuilder_.addMessage(value); - } - return this; - } - public Builder addRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (requestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureRequestIsMutable(); - request_.add(index, value); - onChanged(); - } else { - requestBuilder_.addMessage(index, value); - } - return this; - } - public Builder addRequest( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.add(builderForValue.build()); - onChanged(); - } else { - requestBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - request_.add(index, builderForValue.build()); - onChanged(); - } else { - requestBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllRequest( - java.lang.Iterable values) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - super.addAll(values, request_); - onChanged(); - } else { - requestBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearRequest() { - if (requestBuilder_ == null) { - request_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - requestBuilder_.clear(); - } - return this; - } - public Builder removeRequest(int index) { - if (requestBuilder_ == null) { - ensureRequestIsMutable(); - 
request_.remove(index); - onChanged(); - } else { - requestBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getRequestBuilder( - int index) { - return getRequestFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( - int index) { - if (requestBuilder_ == null) { - return request_.get(index); } else { - return requestBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getRequestOrBuilderList() { - if (requestBuilder_ != null) { - return requestBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(request_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder() { - return getRequestFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder( - int index) { - return getRequestFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public java.util.List - getRequestBuilderList() { - return getRequestFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getRequestFieldBuilder() { - if (requestBuilder_ == null) { - requestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - request_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - request_ = null; - } - return requestBuilder_; - } - - // optional bool atomic = 2; - private boolean atomic_ ; - public boolean hasAtomic() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public boolean getAtomic() { - return atomic_; - } - public Builder setAtomic(boolean value) { - bitField0_ |= 0x00000002; - atomic_ = value; - onChanged(); - return this; - } - public Builder clearAtomic() { - bitField0_ = (bitField0_ & ~0x00000002); - atomic_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:MultiRequest) - } - - static { - defaultInstance = new MultiRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiRequest) - } - - public interface MultiResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .Parameter response = 1; - java.util.List - getResponseList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index); - int getResponseCount(); - java.util.List - getResponseOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( - int index); - } - public static final class MultiResponse extends - com.google.protobuf.GeneratedMessage - implements MultiResponseOrBuilder { - // Use MultiResponse.newBuilder() to construct. 
- private MultiResponse(Builder builder) { - super(builder); - } - private MultiResponse(boolean noInit) {} - - private static final MultiResponse defaultInstance; - public static MultiResponse getDefaultInstance() { - return defaultInstance; - } - - public MultiResponse getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; - } - - // repeated .Parameter response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private java.util.List response_; - public java.util.List getResponseList() { - return response_; - } - public java.util.List - getResponseOrBuilderList() { - return response_; - } - public int getResponseCount() { - return response_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { - return response_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( - int index) { - return response_.get(index); - } - - private void initFields() { - response_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getResponseCount(); i++) { - if (!getResponse(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - 
for (int i = 0; i < response_.size(); i++) { - output.writeMessage(1, response_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < response_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, response_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) obj; - - boolean result = true; - result = result && getResponseList() - .equals(other.getResponseList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResponseCount() > 0) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponseList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if 
(builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.newBuilder() - private Builder() { - 
maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResponseFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (responseBuilder_ == null) { - response_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - responseBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result 
= new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse(this); - int from_bitField0_ = bitField0_; - if (responseBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - response_ = java.util.Collections.unmodifiableList(response_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.response_ = response_; - } else { - result.response_ = responseBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()) return this; - if (responseBuilder_ == null) { - if (!other.response_.isEmpty()) { - if (response_.isEmpty()) { - response_ = other.response_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureResponseIsMutable(); - response_.addAll(other.response_); - } - onChanged(); - } - } else { - if (!other.response_.isEmpty()) { - if (responseBuilder_.isEmpty()) { - responseBuilder_.dispose(); - responseBuilder_ = null; - response_ = other.response_; - bitField0_ = (bitField0_ & ~0x00000001); - responseBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getResponseFieldBuilder() : null; - } else { - responseBuilder_.addAllMessages(other.response_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getResponseCount(); i++) { - if (!getResponse(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addResponse(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated .Parameter response = 1; - private java.util.List response_ = - java.util.Collections.emptyList(); - private void ensureResponseIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - response_ = new java.util.ArrayList(response_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> responseBuilder_; - - public java.util.List getResponseList() { 
- if (responseBuilder_ == null) { - return java.util.Collections.unmodifiableList(response_); - } else { - return responseBuilder_.getMessageList(); - } - } - public int getResponseCount() { - if (responseBuilder_ == null) { - return response_.size(); - } else { - return responseBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { - if (responseBuilder_ == null) { - return response_.get(index); - } else { - return responseBuilder_.getMessage(index); - } - } - public Builder setResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResponseIsMutable(); - response_.set(index, value); - onChanged(); - } else { - responseBuilder_.setMessage(index, value); - } - return this; - } - public Builder setResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.set(index, builderForValue.build()); - onChanged(); - } else { - responseBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addResponse(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResponseIsMutable(); - response_.add(value); - onChanged(); - } else { - responseBuilder_.addMessage(value); - } - return this; - } - public Builder addResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { - if (responseBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResponseIsMutable(); - response_.add(index, value); - onChanged(); - } else { - responseBuilder_.addMessage(index, value); - } - return 
this; - } - public Builder addResponse( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.add(builderForValue.build()); - onChanged(); - } else { - responseBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addResponse( - int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.add(index, builderForValue.build()); - onChanged(); - } else { - responseBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllResponse( - java.lang.Iterable values) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - super.addAll(values, response_); - onChanged(); - } else { - responseBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearResponse() { - if (responseBuilder_ == null) { - response_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - responseBuilder_.clear(); - } - return this; - } - public Builder removeResponse(int index) { - if (responseBuilder_ == null) { - ensureResponseIsMutable(); - response_.remove(index); - onChanged(); - } else { - responseBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getResponseBuilder( - int index) { - return getResponseFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( - int index) { - if (responseBuilder_ == null) { - return response_.get(index); } else { - return responseBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getResponseOrBuilderList() { - if (responseBuilder_ != null) { - return 
responseBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(response_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder() { - return getResponseFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder( - int index) { - return getResponseFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); - } - public java.util.List - getResponseBuilderList() { - return getResponseFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> - getResponseFieldBuilder() { - if (responseBuilder_ == null) { - responseBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( - response_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - response_ = null; - } - return responseBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiResponse) - } - - static { - defaultInstance = new MultiResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiResponse) - } - - public static abstract class RegionClientService - implements com.google.protobuf.Service { - protected RegionClientService() {} - - public interface Interface { - public abstract void 
get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new RegionClientService() { - @java.lang.Override - public void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - 
com.google.protobuf.RpcCallback done) { - impl.get(controller, request, done); - } - - @java.lang.Override - public void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { - impl.mutate(controller, request, done); - } - - @java.lang.Override - public void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { - impl.scan(controller, request, done); - } - - @java.lang.Override - public void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { - impl.lockRow(controller, request, done); - } - - @java.lang.Override - public void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { - impl.unlockRow(controller, request, done); - } - - @java.lang.Override - public void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { - impl.bulkLoadHFile(controller, request, done); - } - - @java.lang.Override - public void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { - impl.execCoprocessor(controller, request, done); - } - - @java.lang.Override - public void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { - impl.multi(controller, 
request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request); - case 1: - return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request); - case 2: - return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request); - case 3: - return impl.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request); - case 4: - return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request); - case 5: - return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request); - case 6: - return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request); - case 7: - return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - 
getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); - case 2: - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - public abstract void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void bulkLoadHFile( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done); - - public abstract void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.lockRow(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 7: - this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); - case 5: - return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); - case 7: - return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.RegionClientService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance())); - } - - public void mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance())); - } - - public void scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(), - 
com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance())); - } - - public void lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance())); - } - - public void unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance())); - } - - public void bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(), - 
com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance())); - } - - public void execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance())); - } - - public void multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) - throws 
com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) - throws 
com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()); - } - - - public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest 
request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(7), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()); - } - - } - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Column_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Column_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Attribute_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Attribute_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Get_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Get_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Result_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Result_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_GetRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GetResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GetResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Condition_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Condition_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_ColumnValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_ColumnValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Mutate_ColumnValue_QualifierValue_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MutateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MutateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MutateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MutateResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Scan_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Scan_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_ScanRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ScanRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ScanResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ScanResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_LockRowRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LockRowRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_LockRowResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_LockRowResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnlockRowRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnlockRowRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UnlockRowResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UnlockRowResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_BulkLoadHFileResponse_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_BulkLoadHFileResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Parameter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Parameter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Property_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Property_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_Exec_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_Exec_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ExecCoprocessorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ExecCoprocessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ExecCoprocessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ExecCoprocessorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - 
"\n\022RegionClient.proto\032\013hbase.proto\"+\n\006Col" + - "umn\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"(" + - "\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014" + - "\"\310\001\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007." + - "Column\022\035\n\tattribute\030\003 \003(\0132\n.Attribute\022\016\n" + - "\006lockId\030\004 \001(\004\022\032\n\006filter\030\005 \001(\0132\n.Paramete" + - "r\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxV" + - "ersions\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004t" + - "rue\"\"\n\006Result\022\030\n\005value\030\001 \003(\0132\t.KeyValue\"" + - "r\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSp", - "ecifier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020closestRow" + - "Before\030\003 \001(\010\022\025\n\rexistenceOnly\030\004 \001(\010\"6\n\013G" + - "etResponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\016\n\006e" + - "xists\030\002 \001(\010\"\355\003\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016" + - "\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\013com" + - "pareType\030\004 \002(\0162\026.Condition.CompareType\022)" + - "\n\ncomparator\030\005 \002(\0162\025.Condition.Comparato" + - "r\022\r\n\005value\030\006 \001(\014\"r\n\013CompareType\022\010\n\004LESS\020" + - "\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_E" + - "QUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020", - "\005\022\t\n\005NO_OP\020\006\"\324\001\n\nComparator\022\025\n\021BINARY_CO" + - "MPARATOR\020\000\022\034\n\030BINARY_PREFIX_COMPARATOR\020\001" + - "\022\026\n\022BIT_AND_COMPARATOR\020\002\022\025\n\021BIT_OR_COMPA" + - "RATOR\020\003\022\026\n\022BIT_XOR_COMPARATOR\020\004\022\023\n\017NULL_" + - "COMPARATOR\020\005\022\033\n\027REGEX_STRING_COMPARATOR\020" + - 
"\006\022\030\n\024SUBSTRING_COMPARATOR\020\007\"\374\003\n\006Mutate\022\013" + - "\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Mutate" + - ".MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mutat" + - "e.ColumnValue\022\035\n\tattribute\030\004 \003(\0132\n.Attri" + - "bute\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030\006 \001(\004\022", - "\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeRange\030\n" + - " \001(\0132\n.TimeRange\032\263\001\n\013ColumnValue\022\016\n\006fami" + - "ly\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\".Mutat" + - "e.ColumnValue.QualifierValue\022\021\n\ttimestam" + - "p\030\003 \001(\004\032E\n\016QualifierValue\022\021\n\tqualifier\030\001" + - " \002(\014\022\r\n\005value\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\"b" + - "\n\nMutateType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022" + - "\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\022\021\n\rDELETE_COLUMN\020\004\022" + - "\021\n\rDELETE_FAMILY\020\005\"i\n\rMutateRequest\022 \n\006r" + - "egion\030\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030", - "\002 \002(\0132\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Cond" + - "ition\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132" + - "\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\367\001\n\004Scan\022\027\n\006" + - "column\030\001 \003(\0132\007.Column\022\035\n\tattribute\030\002 \003(\013" + - "2\n.Attribute\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRo" + - "w\030\004 \001(\014\022\032\n\006filter\030\005 \001(\0132\n.Parameter\022\035\n\tt" + - "imeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersion" + - "s\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022\023\n" + - "\013rowsToCache\030\t \001(\r\022\021\n\tbatchSize\030\n \001(\r\"a\n" + - "\013ScanRequest\022\021\n\tscannerId\030\001 \001(\004\022\023\n\004scan\030", - "\002 
\001(\0132\005.Scan\022\024\n\014numberOfRows\030\003 \001(\r\022\024\n\014cl" + - "oseScanner\030\004 \001(\010\"\\\n\014ScanResponse\022\027\n\006resu" + - "lt\030\001 \003(\0132\007.Result\022\021\n\tscannerId\030\002 \001(\004\022\023\n\013" + - "moreResults\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\"?\n\016LockRo" + - "wRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022\013\n\003row\030\002 \003(\014\".\n\017LockRowResponse\022\016\n\006lo" + - "ckId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"D\n\020UnlockRowRequ" + - "est\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\016\n" + - "\006lockId\030\002 \002(\004\"\023\n\021UnlockRowResponse\"\232\001\n\024B" + - "ulkLoadHFileRequest\022 \n\006region\030\001 \002(\0132\020.Re", - "gionSpecifier\0224\n\nfamilyPath\030\002 \003(\0132 .Bulk" + - "LoadHFileRequest.FamilyPath\032*\n\nFamilyPat" + - "h\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkL" + - "oadHFileResponse\022\016\n\006loaded\030\001 \002(\010\".\n\tPara" + - "meter\022\014\n\004type\030\001 \002(\t\022\023\n\013binaryValue\030\002 \001(\014" + - "\"\'\n\010Property\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(" + - "\t\"y\n\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002" + - " \002(\t\022\022\n\nmethodName\030\003 \002(\t\022\033\n\010property\030\004 \003" + - "(\0132\t.Property\022\035\n\tparameter\030\005 \003(\0132\n.Param" + - "eter\"O\n\026ExecCoprocessorRequest\022 \n\006region", - "\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002 \002(\0132\005" + - ".Exec\"H\n\027ExecCoprocessorResponse\022\022\n\nregi" + - "onName\030\001 \002(\014\022\031\n\005value\030\002 \002(\0132\n.Parameter\"" + - ";\n\014MultiRequest\022\033\n\007request\030\001 \003(\0132\n.Param" + - "eter\022\016\n\006atomic\030\002 \001(\010\"-\n\rMultiResponse\022\034\n" + - "\010response\030\001 
\003(\0132\n.Parameter2\227\003\n\023RegionCl" + - "ientService\022 \n\003get\022\013.GetRequest\032\014.GetRes" + - "ponse\022)\n\006mutate\022\016.MutateRequest\032\017.Mutate" + - "Response\022#\n\004scan\022\014.ScanRequest\032\r.ScanRes" + - "ponse\022,\n\007lockRow\022\017.LockRowRequest\032\020.Lock", - "RowResponse\0222\n\tunlockRow\022\021.UnlockRowRequ" + - "est\032\022.UnlockRowResponse\022>\n\rbulkLoadHFile" + - "\022\025.BulkLoadHFileRequest\032\026.BulkLoadHFileR" + - "esponse\022D\n\017execCoprocessor\022\027.ExecCoproce" + - "ssorRequest\032\030.ExecCoprocessorResponse\022&\n" + - "\005multi\022\r.MultiRequest\032\016.MultiResponseBH\n" + - "*org.apache.hadoop.hbase.protobuf.genera" + - "tedB\022RegionClientProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_Column_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_Column_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Column_descriptor, - new java.lang.String[] { "Family", "Qualifier", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder.class); - internal_static_Attribute_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_Attribute_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Attribute_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder.class); - 
internal_static_Get_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_Get_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Get_descriptor, - new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder.class); - internal_static_Result_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_Result_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Result_descriptor, - new java.lang.String[] { "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder.class); - internal_static_GetRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_GetRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetRequest_descriptor, - new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.Builder.class); - internal_static_GetResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_GetResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GetResponse_descriptor, - new java.lang.String[] { "Result", "Exists", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.Builder.class); - internal_static_Condition_descriptor = - getDescriptor().getMessageTypes().get(6); - 
internal_static_Condition_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Condition_descriptor, - new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder.class); - internal_static_Mutate_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_Mutate_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_descriptor, - new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder.class); - internal_static_Mutate_ColumnValue_descriptor = - internal_static_Mutate_descriptor.getNestedTypes().get(0); - internal_static_Mutate_ColumnValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_ColumnValue_descriptor, - new java.lang.String[] { "Family", "QualifierValue", "Timestamp", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder.class); - internal_static_Mutate_ColumnValue_QualifierValue_descriptor = - internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); - internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Mutate_ColumnValue_QualifierValue_descriptor, - new java.lang.String[] { "Qualifier", "Value", "Timestamp", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.class, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); - internal_static_MutateRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_MutateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MutateRequest_descriptor, - new java.lang.String[] { "Region", "Mutate", "Condition", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.Builder.class); - internal_static_MutateResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_MutateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MutateResponse_descriptor, - new java.lang.String[] { "Result", "Processed", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.Builder.class); - internal_static_Scan_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_Scan_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Scan_descriptor, - new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "RowsToCache", "BatchSize", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder.class); - internal_static_ScanRequest_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_ScanRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ScanRequest_descriptor, - new java.lang.String[] { "ScannerId", "Scan", "NumberOfRows", "CloseScanner", }, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.Builder.class); - internal_static_ScanResponse_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_ScanResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ScanResponse_descriptor, - new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.Builder.class); - internal_static_LockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_LockRowRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LockRowRequest_descriptor, - new java.lang.String[] { "Region", "Row", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.Builder.class); - internal_static_LockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_LockRowResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_LockRowResponse_descriptor, - new java.lang.String[] { "LockId", "Ttl", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.Builder.class); - internal_static_UnlockRowRequest_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_UnlockRowRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UnlockRowRequest_descriptor, - new java.lang.String[] { "Region", "LockId", }, - 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.Builder.class); - internal_static_UnlockRowResponse_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_UnlockRowResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UnlockRowResponse_descriptor, - new java.lang.String[] { }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.Builder.class); - internal_static_BulkLoadHFileRequest_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_BulkLoadHFileRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BulkLoadHFileRequest_descriptor, - new java.lang.String[] { "Region", "FamilyPath", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.Builder.class); - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = - internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); - internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, - new java.lang.String[] { "Family", "Path", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); - internal_static_BulkLoadHFileResponse_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_BulkLoadHFileResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
- internal_static_BulkLoadHFileResponse_descriptor, - new java.lang.String[] { "Loaded", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.Builder.class); - internal_static_Parameter_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_Parameter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Parameter_descriptor, - new java.lang.String[] { "Type", "BinaryValue", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder.class); - internal_static_Property_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_Property_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Property_descriptor, - new java.lang.String[] { "Name", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder.class); - internal_static_Exec_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_Exec_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_Exec_descriptor, - new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder.class); - internal_static_ExecCoprocessorRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - internal_static_ExecCoprocessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ExecCoprocessorRequest_descriptor, - new java.lang.String[] { "Region", "Call", 
}, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.Builder.class); - internal_static_ExecCoprocessorResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_ExecCoprocessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ExecCoprocessorResponse_descriptor, - new java.lang.String[] { "RegionName", "Value", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.Builder.class); - internal_static_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(24); - internal_static_MultiRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiRequest_descriptor, - new java.lang.String[] { "Request", "Atomic", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.Builder.class); - internal_static_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(25); - internal_static_MultiResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiResponse_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, - org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // 
@@protoc_insertion_point(outer_class_scope) -} diff --git src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 9b34e61..0d22c0e 100644 --- src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -40,11 +40,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -119,7 +117,7 @@ import org.apache.hadoop.hbase.ipc.Invocation; import org.apache.hadoop.hbase.ipc.ProtocolSignature; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; -import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress; import org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler; import org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler; @@ -139,7 +137,6 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CompressionTest; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSTableDescriptors; -import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.InfoServer; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Sleeper; @@ -170,36 +167,22 @@ import com.google.common.collect.Lists; * the HMaster. There are many HRegionServers in a single HBase deployment. 
*/ @InterfaceAudience.Private -public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, - Runnable, RegionServerServices { +public class HRegionServer extends RegionServer + implements HRegionInterface, HBaseRPCErrorHandler { public static final Log LOG = LogFactory.getLog(HRegionServer.class); - // Set when a report to the master comes back with a message asking us to - // shutdown. Also set by call to stop when debugging or running unit tests - // of HRegionServer in isolation. - protected volatile boolean stopped = false; - // A state before we go into stopped state. At this stage we're closing user // space regions. private boolean stopping = false; - // Go down hard. Used if file system becomes unavailable and also in - // debugging and unit tests. - protected volatile boolean abortRequested; - private volatile boolean killed = false; - // If false, the file system has become unavailable - protected volatile boolean fsOk; - protected final Configuration conf; protected final AtomicBoolean haveRootRegion = new AtomicBoolean(false); - private HFileSystem fs; private boolean useHBaseChecksum; // verify hbase checksums? private Path rootDir; - private final Random rand = new Random(); //RegionName vs current action in progress //true - if open region action in progress @@ -207,13 +190,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, private final ConcurrentSkipListMap regionsInTransitionInRS = new ConcurrentSkipListMap(Bytes.BYTES_COMPARATOR); - /** - * Map of regions currently being served by this region server. Key is the - * encoded region name. All access should be synchronized. 
- */ - protected final Map onlineRegions = - new ConcurrentHashMap(); - protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); final int numRetries; @@ -222,8 +198,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, protected final int numRegionsToReport; - private final long maxScannerResultSize; - // Remote HMaster private HMasterRegionInterface hbaseMaster; @@ -233,13 +207,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, private final InetSocketAddress isa; - // Leases - private Leases leases; - - // Request counter. - // Do we need this? Can't we just sum region counters? St.Ack 20110412 - private AtomicInteger requestCount = new AtomicInteger(); - // Info server. Default access so can be used by unit tests. REGIONSERVER // is name of the webapp and the attribute name used stuffing this instance // into web context. @@ -263,9 +230,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // Compactions public CompactSplitThread compactSplitThread; - // Cache flushing - MemStoreFlusher cacheFlusher; - /* * Check for compactions requests. 
*/ @@ -279,9 +243,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // flag set after we're done setting up server threads (used for testing) protected volatile boolean isOnline; - final Map scanners = - new ConcurrentHashMap(); - // zookeeper connection and watcher private ZooKeeperWatcher zooKeeper; @@ -405,8 +366,10 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, if (initialIsa.getAddress() == null) { throw new IllegalArgumentException("Failed resolve of " + initialIsa); } + this.rpcServer = HBaseRPC.getServer(this, - new Class[]{HRegionInterface.class, HBaseRPCErrorHandler.class, + new Class[]{HRegionInterface.class, ClientProtocol.class, + HBaseRPCErrorHandler.class, OnlineRegions.class}, initialIsa.getHostName(), // BindAddress is IP we got for this server. initialIsa.getPort(), @@ -445,12 +408,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - private static final int NORMAL_QOS = 0; - private static final int QOS_THRESHOLD = 10; // the line between low and high qos - private static final int HIGH_QOS = 100; - @Retention(RetentionPolicy.RUNTIME) - private @interface QosPriority { + protected @interface QosPriority { int priority() default 0; } @@ -648,7 +607,7 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, // Create the thread for the ThriftServer. if (conf.getBoolean("hbase.regionserver.export.thrift", false)) { - thriftServer = new HRegionThriftServer(this, conf); + thriftServer = new HRegionThriftServer((RegionServer)this, conf); thriftServer.start(); LOG.info("Started Thrift API from Region Server."); } @@ -1081,110 +1040,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } /* - * Cleanup after Throwable caught invoking method. Converts t to - * IOE if it isn't already. - * - * @param t Throwable - * - * @return Throwable converted to an IOE; methods can only let out IOEs. 
- */ - private Throwable cleanup(final Throwable t) { - return cleanup(t, null); - } - - /* - * Cleanup after Throwable caught invoking method. Converts t to - * IOE if it isn't already. - * - * @param t Throwable - * - * @param msg Message to log in error. Can be null. - * - * @return Throwable converted to an IOE; methods can only let out IOEs. - */ - private Throwable cleanup(final Throwable t, final String msg) { - // Don't log as error if NSRE; NSRE is 'normal' operation. - if (t instanceof NotServingRegionException) { - LOG.debug("NotServingRegionException; " + t.getMessage()); - return t; - } - if (msg == null) { - LOG.error("", RemoteExceptionHandler.checkThrowable(t)); - } else { - LOG.error(msg, RemoteExceptionHandler.checkThrowable(t)); - } - if (!checkOOME(t)) { - checkFileSystem(); - } - return t; - } - - /* - * @param t - * - * @return Make t an IOE if it isn't already. - */ - private IOException convertThrowableToIOE(final Throwable t) { - return convertThrowableToIOE(t, null); - } - - /* - * @param t - * - * @param msg Message to put in new IOE if passed t is not an IOE - * - * @return Make t an IOE if it isn't already. - */ - private IOException convertThrowableToIOE(final Throwable t, final String msg) { - return (t instanceof IOException ? (IOException) t : msg == null - || msg.length() == 0 ? new IOException(t) : new IOException(msg, t)); - } - - /* - * Check if an OOME and, if so, abort immediately to avoid creating more objects. - * - * @param e - * - * @return True if we OOME'd and are aborting. 
- */ - public boolean checkOOME(final Throwable e) { - boolean stop = false; - try { - if (e instanceof OutOfMemoryError - || (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) - || (e.getMessage() != null && e.getMessage().contains( - "java.lang.OutOfMemoryError"))) { - stop = true; - LOG.fatal( - "Run out of memory; HRegionServer will abort itself immediately", e); - } - } finally { - if (stop) { - Runtime.getRuntime().halt(1); - } - } - return stop; - } - - /** - * Checks to see if the file system is still accessible. If not, sets - * abortRequested and stopRequested - * - * @return false if file system is not available - */ - public boolean checkFileSystem() { - if (this.fsOk && this.fs != null) { - try { - FSUtils.checkFileSystemAvailable(this.fs); - } catch (IOException e) { - abort("File System not available", e); - this.fsOk = false; - } - } - return this.fsOk; - } - - /* * Inner class that runs on a long period checking if regions need compaction. */ private static class CompactionChecker extends Chore { @@ -2334,15 +2189,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - protected long addScanner(RegionScanner s) throws LeaseStillHeldException { - long scannerId = -1L; - scannerId = rand.nextLong(); - String scannerName = String.valueOf(scannerId); - scanners.put(scannerName, s); - this.leases.createLease(scannerName, new ScannerListener(scannerName)); - return scannerId; - } - public Result next(final long scannerId) throws IOException { Result[] res = next(scannerId, 1); if (res == null || res.length == 0) { @@ -2468,42 +2314,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - /** - * Instantiated as a scanner lease. 
If the lease times out, the scanner is - * closed - */ - private class ScannerListener implements LeaseListener { - private final String scannerName; - - ScannerListener(final String n) { - this.scannerName = n; - } - - public void leaseExpired() { - RegionScanner s = scanners.remove(this.scannerName); - if (s != null) { - LOG.info("Scanner " + this.scannerName + " lease expired on region " - + s.getRegionInfo().getRegionNameAsString()); - try { - HRegion region = getRegion(s.getRegionInfo().getRegionName()); - if (region != null && region.getCoprocessorHost() != null) { - region.getCoprocessorHost().preScannerClose(s); - } - - s.close(); - if (region != null && region.getCoprocessorHost() != null) { - region.getCoprocessorHost().postScannerClose(s); - } - } catch (IOException e) { - LOG.error("Closing scanner for " - + s.getRegionInfo().getRegionNameAsString(), e); - } - } else { - LOG.info("Scanner " + this.scannerName + " lease expired"); - } - } - } - // // Methods that do the actual work for the remote API // @@ -2585,39 +2395,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } } - protected long addRowLock(Integer r, HRegion region) - throws LeaseStillHeldException { - long lockId = -1L; - lockId = rand.nextLong(); - String lockName = String.valueOf(lockId); - rowlocks.put(lockName, r); - this.leases.createLease(lockName, new RowLockListener(lockName, region)); - return lockId; - } - - /** - * Method to get the Integer lock identifier used internally from the long - * lock identifier used by the client. - * - * @param lockId - * long row lock identifier from client - * @return intId Integer row lock used internally in HRegion - * @throws IOException - * Thrown if this is not a valid client lock id. 
- */ - Integer getLockFromId(long lockId) throws IOException { - if (lockId == -1L) { - return null; - } - String lockName = String.valueOf(lockId); - Integer rl = rowlocks.get(lockName); - if (rl == null) { - throw new UnknownRowLockException("Invalid row lock"); - } - this.leases.renewLease(lockName); - return rl; - } - @Override @QosPriority(priority=HIGH_QOS) public void unlockRow(byte[] regionName, long lockId) throws IOException { @@ -2663,30 +2440,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return region.bulkLoadHFiles(familyPaths); } - Map rowlocks = new ConcurrentHashMap(); - - /** - * Instantiated as a row lock lease. If the lease times out, the row lock is - * released - */ - private class RowLockListener implements LeaseListener { - private final String lockName; - private final HRegion region; - - RowLockListener(final String lockName, final HRegion region) { - this.lockName = lockName; - this.region = region; - } - - public void leaseExpired() { - LOG.info("Row Lock " + this.lockName + " lease expired"); - Integer r = rowlocks.remove(this.lockName); - if (r != null) { - region.releaseRowLock(r); - } - } - } - // Region open/close direct RPCs @Override @@ -3057,15 +2810,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return r; } - /** - * @param regionName - * @return HRegion for the passed binary regionName or null if - * named region is not member of the online regions. - */ - public HRegion getOnlineRegion(final byte[] regionName) { - return getFromOnlineRegions(HRegionInfo.encodeRegionName(regionName)); - } - /** @return the request count */ public AtomicInteger getRequestCount() { return this.requestCount; @@ -3084,25 +2828,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, } /** - * Protected utility method for safely obtaining an HRegion handle. 
- * - * @param regionName - * Name of online {@link HRegion} to return - * @return {@link HRegion} for regionName - * @throws NotServingRegionException - */ - protected HRegion getRegion(final byte[] regionName) - throws NotServingRegionException { - HRegion region = null; - region = getOnlineRegion(regionName); - if (region == null) { - throw new NotServingRegionException("Region is not online: " + - Bytes.toStringBinary(regionName)); - } - return region; - } - - /** * Get the top N most loaded regions this server is serving so we can tell the * master which regions it can reallocate if we're overloaded. TODO: actually * calculate which regions are most loaded. (Right now, we're just grabbing @@ -3123,21 +2848,6 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, return regions.toArray(new HRegionInfo[regions.size()]); } - /** - * Called to verify that this server is up and running. - * - * @throws IOException - */ - protected void checkOpen() throws IOException { - if (this.stopped || this.abortRequested) { - throw new RegionServerStoppedException("Server " + getServerName() + - " not running" + (this.abortRequested ? 
", aborting" : "")); - } - if (!fsOk) { - throw new RegionServerStoppedException("File system not available"); - } - } - @Override @QosPriority(priority=HIGH_QOS) public ProtocolSignature getProtocolSignature( @@ -3145,6 +2855,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, throws IOException { if (protocol.equals(HRegionInterface.class.getName())) { return new ProtocolSignature(HRegionInterface.VERSION, null); + } else if (protocol.equals(ClientProtocol.class.getName())) { + return new ProtocolSignature(ClientProtocol.VERSION, null); } throw new IOException("Unknown protocol: " + protocol); } @@ -3155,6 +2867,8 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, throws IOException { if (protocol.equals(HRegionInterface.class.getName())) { return HRegionInterface.VERSION; + } else if (protocol.equals(ClientProtocol.class.getName())) { + return ClientProtocol.VERSION; } throw new IOException("Unknown protocol: " + protocol); } @@ -3687,4 +3401,4 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler, mxBeanInfo); LOG.info("Registered RegionServer MXBean"); } -} \ No newline at end of file +} diff --git src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java index 703e73d..759633d 100644 --- src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java +++ src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java @@ -36,11 +36,17 @@ import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.thrift.ThriftServerRunner; import org.apache.hadoop.hbase.thrift.ThriftUtilities; import org.apache.hadoop.hbase.thrift.generated.IOError; import org.apache.hadoop.hbase.thrift.generated.TRowResult; +import com.google.protobuf.ServiceException; + /** * HRegionThriftServer - this class starts up a Thrift server in the same * JVM where the RegionServer is running. It inherits most of the @@ -56,14 +62,14 @@ public class HRegionThriftServer extends Thread { public static final Log LOG = LogFactory.getLog(HRegionThriftServer.class); - private final HRegionServer rs; + private final RegionServer rs; private final ThriftServerRunner serverRunner; /** * Create an instance of the glue object that connects the * RegionServer with the standard ThriftServer implementation */ - HRegionThriftServer(HRegionServer regionServer, Configuration conf) + HRegionThriftServer(RegionServer regionServer, Configuration conf) throws IOException { super("Region Thrift Server"); this.rs = regionServer; @@ -130,7 +136,10 @@ public class HRegionThriftServer extends Thread { if (columns == null) { Get get = new Get(row); get.setTimeRange(Long.MIN_VALUE, timestamp); - Result result = rs.get(regionName, get); + GetRequest request = + RequestConverter.buildGetRequest(regionName, get); + GetResponse response = rs.get(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); return ThriftUtilities.rowResultFromHBase(result); } Get get = new Get(row); @@ -143,7 +152,10 @@ public class HRegionThriftServer extends Thread { } } get.setTimeRange(Long.MIN_VALUE, timestamp); - Result result = rs.get(regionName, get); + GetRequest request = + RequestConverter.buildGetRequest(regionName, get); + GetResponse response = rs.get(null, request); + Result result = ProtobufUtil.toResult(response.getResult()); return ThriftUtilities.rowResultFromHBase(result); } catch (NotServingRegionException e) 
{ if (!redirect) { @@ -153,6 +165,10 @@ public class HRegionThriftServer extends Thread { LOG.debug("ThriftServer redirecting getRowWithColumnsTs"); return super.getRowWithColumnsTs(tableName, rowb, columns, timestamp, attributes); + } catch (ServiceException se) { + IOException e = ProtobufUtil.getRemoteException(se); + LOG.warn(e.getMessage(), e); + throw new IOError(e.getMessage()); } catch (IOException e) { LOG.warn(e.getMessage(), e); throw new IOError(e.getMessage()); diff --git src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java index b520f3f..0b7ed0e 100644 --- src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java +++ src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java @@ -55,7 +55,7 @@ import java.io.IOException; @InterfaceAudience.Private public class Leases extends HasThread { private static final Log LOG = LogFactory.getLog(Leases.class.getName()); - private final int leasePeriod; + protected final int leasePeriod; private final int leaseCheckFrequency; private volatile DelayQueue leaseQueue = new DelayQueue(); protected final Map leases = new HashMap(); diff --git src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java new file mode 100644 index 0000000..9487a1c --- /dev/null +++ src/main/java/org/apache/hadoop/hbase/regionserver/RegionServer.java @@ -0,0 +1,1164 @@ +/** + * Copyright 2010 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.regionserver; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.RemoteExceptionHandler; +import org.apache.hadoop.hbase.UnknownRowLockException; +import org.apache.hadoop.hbase.UnknownScannerException; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.Exec; +import org.apache.hadoop.hbase.client.coprocessor.ExecResult; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; +import org.apache.hadoop.hbase.fs.HFileSystem; +import 
org.apache.hadoop.hbase.ipc.CoprocessorProtocol; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse; +import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.regionserver.HRegionServer.QosPriority; +import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.util.Pair; + +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + +/** + * RegionServer makes a set of HRegions available to clients. It checks in with + * the HMaster. There are many RegionServers in a single HBase deployment. + * + * This will be a replacement for the HRegionServer. It has protobuf protocols + * implementations. All the HRegionInterface implementations stay in HRegionServer + * for possible backward compatibility requests. This also makes it easier to + * rip out HRegionInterface later on. + */ +@InterfaceAudience.Private +public abstract class RegionServer implements + ClientProtocol, Runnable, RegionServerServices { + + private static final Log LOG = LogFactory.getLog(RegionServer.class); + + private final Random rand = new Random(); + + protected long maxScannerResultSize; + + // Cache flushing + protected MemStoreFlusher cacheFlusher; + + final Map scanners = + new ConcurrentHashMap(); + + /** + * Map of regions currently being served by this region server. Key is the + * encoded region name. All access should be synchronized. + */ + protected final Map onlineRegions = + new ConcurrentHashMap(); + + // Leases + protected Leases leases; + + // Request counter. + // Do we need this? Can't we just sum region counters? 
St.Ack 20110412 + protected AtomicInteger requestCount = new AtomicInteger(); + + // If false, the file system has become unavailable + protected volatile boolean fsOk; + protected HFileSystem fs; + + protected static final int NORMAL_QOS = 0; + protected static final int QOS_THRESHOLD = 10; // the line between low and high qos + protected static final int HIGH_QOS = 100; + + // Set when a report to the master comes back with a message asking us to + // shutdown. Also set by call to stop when debugging or running unit tests + // of HRegionServer in isolation. + protected volatile boolean stopped = false; + + // Go down hard. Used if file system becomes unavailable and also in + // debugging and unit tests. + protected volatile boolean abortRequested; + + Map rowlocks = new ConcurrentHashMap(); + + /** + * Instantiated as a row lock lease. If the lease times out, the row lock is + * released + */ + private class RowLockListener implements LeaseListener { + private final String lockName; + private final HRegion region; + + RowLockListener(final String lockName, final HRegion region) { + this.lockName = lockName; + this.region = region; + } + + public void leaseExpired() { + LOG.info("Row Lock " + this.lockName + " lease expired"); + Integer r = rowlocks.remove(this.lockName); + if (r != null) { + region.releaseRowLock(r); + } + } + } + + /** + * Instantiated as a scanner lease. 
If the lease times out, the scanner is + * closed + */ + private class ScannerListener implements LeaseListener { + private final String scannerName; + + ScannerListener(final String n) { + this.scannerName = n; + } + + public void leaseExpired() { + RegionScanner s = scanners.remove(this.scannerName); + if (s != null) { + LOG.info("Scanner " + this.scannerName + " lease expired on region " + + s.getRegionInfo().getRegionNameAsString()); + try { + HRegion region = getRegion(s.getRegionInfo().getRegionName()); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().preScannerClose(s); + } + + s.close(); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerClose(s); + } + } catch (IOException e) { + LOG.error("Closing scanner for " + + s.getRegionInfo().getRegionNameAsString(), e); + } + } else { + LOG.info("Scanner " + this.scannerName + " lease expired"); + } + } + } + + /** + * Method to get the Integer lock identifier used internally from the long + * lock identifier used by the client. + * + * @param lockId + * long row lock identifier from client + * @return intId Integer row lock used internally in HRegion + * @throws IOException + * Thrown if this is not a valid client lock id. + */ + Integer getLockFromId(long lockId) throws IOException { + if (lockId == -1L) { + return null; + } + String lockName = String.valueOf(lockId); + Integer rl = rowlocks.get(lockName); + if (rl == null) { + throw new UnknownRowLockException("Invalid row lock"); + } + this.leases.renewLease(lockName); + return rl; + } + + /** + * Called to verify that this server is up and running. + * + * @throws IOException + */ + protected void checkOpen() throws IOException { + if (this.stopped || this.abortRequested) { + throw new RegionServerStoppedException("Server " + getServerName() + + " not running" + (this.abortRequested ? 
", aborting" : "")); + } + if (!fsOk) { + throw new RegionServerStoppedException("File system not available"); + } + } + + /** + * @param regionName + * @return HRegion for the passed binary regionName or null if + * named region is not member of the online regions. + */ + public HRegion getOnlineRegion(final byte[] regionName) { + String encodedRegionName = HRegionInfo.encodeRegionName(regionName); + return this.onlineRegions.get(encodedRegionName); + } + + /** + * Protected utility method for safely obtaining an HRegion handle. + * + * @param regionName + * Name of online {@link HRegion} to return + * @return {@link HRegion} for regionName + * @throws NotServingRegionException + */ + protected HRegion getRegion(final byte[] regionName) + throws NotServingRegionException { + HRegion region = null; + region = getOnlineRegion(regionName); + if (region == null) { + throw new NotServingRegionException("Region is not online: " + + Bytes.toStringBinary(regionName)); + } + return region; + } + + /* + * Cleanup after Throwable caught invoking method. Converts t to + * IOE if it isn't already. + * + * @param t Throwable + * + * @return Throwable converted to an IOE; methods can only let out IOEs. + */ + protected Throwable cleanup(final Throwable t) { + return cleanup(t, null); + } + + /* + * Cleanup after Throwable caught invoking method. Converts t to + * IOE if it isn't already. + * + * @param t Throwable + * + * @param msg Message to log in error. Can be null. + * + * @return Throwable converted to an IOE; methods can only let out IOEs. + */ + protected Throwable cleanup(final Throwable t, final String msg) { + // Don't log as error if NSRE; NSRE is 'normal' operation. 
+ if (t instanceof NotServingRegionException) { + LOG.debug("NotServingRegionException; " + t.getMessage()); + return t; + } + if (msg == null) { + LOG.error("", RemoteExceptionHandler.checkThrowable(t)); + } else { + LOG.error(msg, RemoteExceptionHandler.checkThrowable(t)); + } + if (!checkOOME(t)) { + checkFileSystem(); + } + return t; + } + + /* + * @param t + * + * @return Make t an IOE if it isn't already. + */ + protected IOException convertThrowableToIOE(final Throwable t) { + return convertThrowableToIOE(t, null); + } + + /* + * @param t + * + * @param msg Message to put in new IOE if passed t is not an IOE + * + * @return Make t an IOE if it isn't already. + */ + protected IOException convertThrowableToIOE(final Throwable t, final String msg) { + return (t instanceof IOException ? (IOException) t : msg == null + || msg.length() == 0 ? new IOException(t) : new IOException(msg, t)); + } + + /* + * Check if an OOME and, if so, abort immediately to avoid creating more objects. + * + * @param e + * + * @return True if we OOME'd and are aborting. + */ + public boolean checkOOME(final Throwable e) { + boolean stop = false; + try { + if (e instanceof OutOfMemoryError + || (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) + || (e.getMessage() != null && e.getMessage().contains( + "java.lang.OutOfMemoryError"))) { + stop = true; + LOG.fatal( + "Run out of memory; HRegionServer will abort itself immediately", e); + } + } finally { + if (stop) { + Runtime.getRuntime().halt(1); + } + } + return stop; + } + + /** + * Checks to see if the file system is still accessible. 
If not, sets + * abortRequested and stopRequested + * + * @return false if file system is not available + */ + public boolean checkFileSystem() { + if (this.fsOk && this.fs != null) { + try { + FSUtils.checkFileSystemAvailable(this.fs); + } catch (IOException e) { + abort("File System not available", e); + this.fsOk = false; + } + } + return this.fsOk; + } + + protected long addRowLock(Integer r, HRegion region) + throws LeaseStillHeldException { + long lockId = nextLong(); + String lockName = String.valueOf(lockId); + rowlocks.put(lockName, r); + this.leases.createLease(lockName, new RowLockListener(lockName, region)); + return lockId; + } + + protected long addScanner(RegionScanner s) throws LeaseStillHeldException { + long scannerId = nextLong(); + String scannerName = String.valueOf(scannerId); + scanners.put(scannerName, s); + this.leases.createLease(scannerName, new ScannerListener(scannerName)); + return scannerId; + } + + /** + * Generate a random positive long number + * + * @return a random positive long number + */ + protected long nextLong() { + long n = rand.nextLong(); + if (n == 0) { + return nextLong(); + } + if (n < 0) { + n = -n; + } + return n; + } + + // Start Client methods + + /** + * Get data from a table. 
+ * + * @param controller the RPC controller + * @param request the get request + * @throws ServiceException + */ + @Override + public GetResponse get(final RpcController controller, + final GetRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + GetResponse.Builder builder = GetResponse.newBuilder(); + ClientProtos.Get get = request.getGet(); + Boolean existence = null; + Result r = null; + if (request.getClosestRowBefore()) { + if (get.getColumnCount() != 1) { + throw new DoNotRetryIOException( + "get ClosestRowBefore supports one and only one family now, not " + + get.getColumnCount() + " families"); + } + byte[] row = get.getRow().toByteArray(); + byte[] family = get.getColumn(0).getFamily().toByteArray(); + r = region.getClosestRowBefore(row, family); + } else { + Get clientGet = ProtobufUtil.toGet(get); + if (request.getExistenceOnly() && region.getCoprocessorHost() != null) { + existence = region.getCoprocessorHost().preExists(clientGet); + } + if (existence == null) { + Integer lock = getLockFromId(clientGet.getLockId()); + r = region.get(clientGet, lock); + if (request.getExistenceOnly()) { + boolean exists = r != null && !r.isEmpty(); + if (region.getCoprocessorHost() != null) { + exists = region.getCoprocessorHost().postExists(clientGet, exists); + } + existence = Boolean.valueOf(exists); + } + } + } + if (existence != null) { + builder.setExists(existence.booleanValue()); + } else if (r != null) { + builder.setResult(ProtobufUtil.toResult(r)); + } + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Mutate data in a table. 
+ * + * @param controller the RPC controller + * @param request the mutate request + * @throws ServiceException + */ + @Override + public MutateResponse mutate(final RpcController controller, + final MutateRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + MutateResponse.Builder builder = MutateResponse.newBuilder(); + Mutate mutate = request.getMutate(); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + Integer lock = null; + Result r = null; + Boolean processed = null; + MutateType type = mutate.getMutateType(); + switch (type) { + case APPEND: + r = append(region, mutate); + break; + case INCREMENT: + r = increment(region, mutate); + break; + case PUT: + Put put = ProtobufUtil.toPut(mutate); + lock = getLockFromId(put.getLockId()); + if (request.hasCondition()) { + Condition condition = request.getCondition(); + byte[] row = condition.getRow().toByteArray(); + byte[] family = condition.getFamily().toByteArray(); + byte[] qualifier = condition.getQualifier().toByteArray(); + CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); + WritableByteArrayComparable comparator = + (WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator()); + if (region.getCoprocessorHost() != null) { + processed = region.getCoprocessorHost().preCheckAndPut( + row, family, qualifier, compareOp, comparator, put); + } + if (processed == null) { + boolean result = region.checkAndMutate(row, family, + qualifier, compareOp, comparator, put, lock, true); + if (region.getCoprocessorHost() != null) { + result = region.getCoprocessorHost().postCheckAndPut(row, family, + qualifier, compareOp, comparator, put, result); + } + processed = Boolean.valueOf(result); + } + } else { + region.put(put, lock); + processed = Boolean.TRUE; + } + break; + case DELETE: + Delete delete = ProtobufUtil.toDelete(mutate); + lock = 
getLockFromId(delete.getLockId()); + if (request.hasCondition()) { + Condition condition = request.getCondition(); + byte[] row = condition.getRow().toByteArray(); + byte[] family = condition.getFamily().toByteArray(); + byte[] qualifier = condition.getQualifier().toByteArray(); + CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); + WritableByteArrayComparable comparator = + (WritableByteArrayComparable)ProtobufUtil.toObject(condition.getComparator()); + if (region.getCoprocessorHost() != null) { + processed = region.getCoprocessorHost().preCheckAndDelete( + row, family, qualifier, compareOp, comparator, delete); + } + if (processed == null) { + boolean result = region.checkAndMutate(row, family, + qualifier, compareOp, comparator, delete, lock, true); + if (region.getCoprocessorHost() != null) { + result = region.getCoprocessorHost().postCheckAndDelete(row, family, + qualifier, compareOp, comparator, delete, result); + } + processed = Boolean.valueOf(result); + } + } else { + region.delete(delete, lock, delete.getWriteToWAL()); + processed = Boolean.TRUE; + } + break; + default: + throw new DoNotRetryIOException( + "Unsupported mutate type: " + type.name()); + } + if (processed != null) { + builder.setProcessed(processed.booleanValue()); + } else if (r != null) { + builder.setResult(ProtobufUtil.toResult(r)); + } + return builder.build(); + } catch (IOException ie) { + checkFileSystem(); + throw new ServiceException(ie); + } + } + + // + // remote scanner interface + // + + /** + * Scan data in a table. 
+ * + * @param controller the RPC controller + * @param request the scan request + * @throws ServiceException + */ + @Override + public ScanResponse scan(final RpcController controller, + final ScanRequest request) throws ServiceException { + Leases.Lease lease = null; + String scannerName = null; + try { + if (!request.hasScannerId() && !request.hasScan()) { + throw new DoNotRetryIOException( + "Missing required input: scannerId or scan"); + } + long scannerId = -1; + if (request.hasScannerId()) { + scannerId = request.getScannerId(); + scannerName = String.valueOf(scannerId); + } + try { + checkOpen(); + } catch (IOException e) { + // If checkOpen failed, server not running or filesystem gone, + // cancel this lease; filesystem is gone or we're closing or something. + if (scannerName != null) { + try { + leases.cancelLease(scannerName); + } catch (LeaseException le) { + LOG.info("Server shutting down and client tried to access missing scanner " + + scannerName); + } + } + throw e; + } + requestCount.incrementAndGet(); + + try { + int ttl = 0; + HRegion region = null; + RegionScanner scanner = null; + boolean moreResults = true; + boolean closeScanner = false; + ScanResponse.Builder builder = ScanResponse.newBuilder(); + if (request.hasCloseScanner()) { + closeScanner = request.getCloseScanner(); + } + int rows = 1; + if (request.hasNumberOfRows()) { + rows = request.getNumberOfRows(); + } + if (request.hasScannerId()) { + scanner = scanners.get(scannerName); + if (scanner == null) { + throw new UnknownScannerException( + "Name: " + scannerName + ", already closed?"); + } + region = getRegion(scanner.getRegionInfo().getRegionName()); + } else { + region = getRegion(request.getRegion()); + ClientProtos.Scan protoScan = request.getScan(); + Scan scan = ProtobufUtil.toScan(protoScan); + region.prepareScanner(scan); + if (region.getCoprocessorHost() != null) { + scanner = region.getCoprocessorHost().preScannerOpen(scan); + } + if (scanner == null) { + scanner = 
region.getScanner(scan); + } + if (region.getCoprocessorHost() != null) { + scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner); + } + scannerId = addScanner(scanner); + scannerName = String.valueOf(scannerId); + ttl = leases.leasePeriod; + } + + if (rows > 0) { + try { + // Remove lease while its being processed in server; protects against case + // where processing of request takes > lease expiration time. + lease = leases.removeLease(scannerName); + List results = new ArrayList(rows); + long currentScanResultSize = 0; + + boolean done = false; + // Call coprocessor. Get region info from scanner. + if (region != null && region.getCoprocessorHost() != null) { + Boolean bypass = region.getCoprocessorHost().preScannerNext( + scanner, results, rows); + if (!results.isEmpty()) { + for (Result r : results) { + for (KeyValue kv : r.raw()) { + currentScanResultSize += kv.heapSize(); + } + } + } + if (bypass != null && bypass.booleanValue()) { + done = true; + } + } + + if (!done) { + List values = new ArrayList(); + for (int i = 0; i < rows + && currentScanResultSize < maxScannerResultSize; i++) { + // Collect values to be returned here + boolean moreRows = scanner.next(values, HRegion.METRIC_NEXTSIZE); + if (!values.isEmpty()) { + for (KeyValue kv : values) { + currentScanResultSize += kv.heapSize(); + } + results.add(new Result(values)); + } + if (!moreRows) { + break; + } + values.clear(); + } + + // coprocessor postNext hook + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerNext(scanner, results, rows, true); + } + } + + // If the scanner's filter - if any - is done with the scan + // and wants to tell the client to stop the scan. This is done by passing + // a null result, and setting moreResults to false. 
+ if (scanner.isFilterDone() && results.isEmpty()) { + moreResults = false; + results = null; + } else { + for (Result result: results) { + if (result != null) { + builder.addResult(ProtobufUtil.toResult(result)); + } + } + } + } finally { + // We're done. On way out re-add the above removed lease. + // Adding resets expiration time on lease. + if (scanners.containsKey(scannerName)) { + if (lease != null) leases.addLease(lease); + ttl = leases.leasePeriod; + } + } + } + + if (!moreResults || closeScanner) { + ttl = 0; + moreResults = false; + if (region != null && region.getCoprocessorHost() != null) { + if (region.getCoprocessorHost().preScannerClose(scanner)) { + return builder.build(); // bypass + } + } + scanner = scanners.remove(scannerName); + if (scanner != null) { + scanner.close(); + leases.cancelLease(scannerName); + if (region != null && region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postScannerClose(scanner); + } + } + } + + if (ttl > 0) { + builder.setTtl(ttl); + } + builder.setScannerId(scannerId); + builder.setMoreResults(moreResults); + return builder.build(); + } catch (Throwable t) { + if (scannerName != null && + t instanceof NotServingRegionException) { + scanners.remove(scannerName); + } + throw convertThrowableToIOE(cleanup(t)); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Lock a row in a table. 
+ * + * @param controller the RPC controller + * @param request the lock row request + * @throws ServiceException + */ + @Override + public LockRowResponse lockRow(final RpcController controller, + final LockRowRequest request) throws ServiceException { + try { + if (request.getRowCount() != 1) { + throw new DoNotRetryIOException( + "lockRow supports only one row now, not " + request.getRowCount() + " rows"); + } + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + byte[] row = request.getRow(0).toByteArray(); + try { + Integer r = region.obtainRowLock(row); + long lockId = addRowLock(r, region); + LOG.debug("Row lock " + lockId + " explicitly acquired by client"); + LockRowResponse.Builder builder = LockRowResponse.newBuilder(); + builder.setLockId(lockId); + return builder.build(); + } catch (Throwable t) { + throw convertThrowableToIOE(cleanup(t, + "Error obtaining row lock (fsOk: " + this.fsOk + ")")); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Unlock a locked row in a table. 
+ * + * @param controller the RPC controller + * @param request the unlock row request + * @throws ServiceException + */ + @Override + @QosPriority(priority=HIGH_QOS) + public UnlockRowResponse unlockRow(final RpcController controller, + final UnlockRowRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + if (!request.hasLockId()) { + throw new DoNotRetryIOException( + "Invalid unlock row request, missing lock id"); + } + long lockId = request.getLockId(); + String lockName = String.valueOf(lockId); + try { + Integer r = rowlocks.remove(lockName); + if (r == null) { + throw new UnknownRowLockException(lockName); + } + region.releaseRowLock(r); + this.leases.cancelLease(lockName); + LOG.debug("Row lock " + lockId + + " has been explicitly released by client"); + return UnlockRowResponse.newBuilder().build(); + } catch (Throwable t) { + throw convertThrowableToIOE(cleanup(t)); + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Atomically bulk load several HFiles into an open region + * @return true if successful, false if failed but recoverably (no action) + * @throws IOException if failed unrecoverably + */ + @Override + public BulkLoadHFileResponse bulkLoadHFile(final RpcController controller, + final BulkLoadHFileRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + List> familyPaths = new ArrayList>(); + for (FamilyPath familyPath: request.getFamilyPathList()) { + familyPaths.add(new Pair( + familyPath.getFamily().toByteArray(), familyPath.getPath())); + } + boolean loaded = region.bulkLoadHFiles(familyPaths); + BulkLoadHFileResponse.Builder builder = BulkLoadHFileResponse.newBuilder(); + builder.setLoaded(loaded); + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Executes a single {@link 
org.apache.hadoop.hbase.ipc.CoprocessorProtocol} + * method using the registered protocol handlers. + * {@link CoprocessorProtocol} implementations must be registered per-region + * via the + * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} + * method before they are available. + * + * @param regionName name of the region against which the invocation is executed + * @param call an {@code Exec} instance identifying the protocol, method name, + * and parameters for the method invocation + * @return an {@code ExecResult} instance containing the region name of the + * invocation and the return value + * @throws IOException if no registered protocol handler is found or an error + * occurs during the invocation + * @see org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol) + */ + @Override + public ExecCoprocessorResponse execCoprocessor(final RpcController controller, + final ExecCoprocessorRequest request) throws ServiceException { + try { + requestCount.incrementAndGet(); + HRegion region = getRegion(request.getRegion()); + ExecCoprocessorResponse.Builder + builder = ExecCoprocessorResponse.newBuilder(); + ClientProtos.Exec call = request.getCall(); + Exec clientCall = ProtobufUtil.toExec(call); + ExecResult result = region.exec(clientCall); + builder.setValue(ProtobufUtil.toParameter(result.getValue())); + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + /** + * Execute multiple actions on a table: get, mutate, and/or execCoprocessor + * + * @param controller the RPC controller + * @param request the multi request + * @throws ServiceException + */ + @Override + public MultiResponse multi(final RpcController controller, + final MultiRequest request) throws ServiceException { + try { + HRegion region = getRegion(request.getRegion()); + MultiResponse.Builder builder = 
MultiResponse.newBuilder(); + if (request.hasAtomic() && request.getAtomic()) { + List mutates = new ArrayList(); + for (NameBytesPair parameter: request.getActionList()) { + Object action = ProtobufUtil.toObject(parameter); + if (action instanceof Mutate) { + mutates.add((Mutate)action); + } else { + throw new DoNotRetryIOException( + "Unsupported atomic action type: " + + action.getClass().getName()); + } + } + mutateRows(region, mutates); + } else { + ActionResult.Builder resultBuilder = null; + List puts = new ArrayList(); + for (NameBytesPair parameter: request.getActionList()) { + requestCount.incrementAndGet(); + try { + Object result = null; + Object action = ProtobufUtil.toObject(parameter); + if (action instanceof ClientProtos.Get) { + Get get = ProtobufUtil.toGet((ClientProtos.Get)action); + Integer lock = getLockFromId(get.getLockId()); + Result r = region.get(get, lock); + if (r != null) { + result = ProtobufUtil.toResult(r); + } + } else if (action instanceof Mutate) { + Mutate mutate = (Mutate)action; + MutateType type = mutate.getMutateType(); + if (type != MutateType.PUT) { + if (!puts.isEmpty()) { + put(builder, region, puts); + puts.clear(); + } else if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + } + Result r = null; + switch (type) { + case APPEND: + r = append(region, mutate); + break; + case INCREMENT: + r = increment(region, mutate); + break; + case PUT: + puts.add(mutate); + break; + case DELETE: + Delete delete = ProtobufUtil.toDelete(mutate); + Integer lock = getLockFromId(delete.getLockId()); + region.delete(delete, lock, delete.getWriteToWAL()); + r = new Result(); + break; + default: + throw new DoNotRetryIOException( + "Unsupported mutate type: " + type.name()); + } + if (r != null) { + result = ProtobufUtil.toResult(r); + } + } else if (action instanceof ClientProtos.Exec) { + Exec call = ProtobufUtil.toExec((ClientProtos.Exec)action); + result = region.exec(call).getValue(); + } else { + 
LOG.debug("Error: invalid action, " + + "it must be a Get, Mutate, or Exec."); + throw new DoNotRetryIOException("Invalid action, " + + "it must be a Get, Mutate, or Exec."); + } + if (result != null) { + if (resultBuilder == null) { + resultBuilder = ActionResult.newBuilder(); + } else { + resultBuilder.clear(); + } + NameBytesPair value = ProtobufUtil.toParameter(result); + resultBuilder.setValue(value); + builder.addResult(resultBuilder.build()); + } + } catch (IOException ie) { + builder.addResult(ResponseConverter.buildActionResult(ie)); + } + } + if (!puts.isEmpty()) { + put(builder, region, puts); + } + } + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + +// End Client methods + + /** + * Find the HRegion based on a region specifier + * + * @param regionSpecifier the region specifier + * @return the corresponding region + * @throws IOException if the specifier is not null, + * but failed to find the region + */ + protected HRegion getRegion( + final RegionSpecifier regionSpecifier) throws IOException { + byte[] value = regionSpecifier.getValue().toByteArray(); + RegionSpecifierType type = regionSpecifier.getType(); + checkOpen(); + switch (type) { + case REGION_NAME: + return getRegion(value); + case ENCODED_REGION_NAME: + String encodedRegionName = Bytes.toString(value); + HRegion region = this.onlineRegions.get(encodedRegionName); + if (region == null) { + throw new NotServingRegionException( + "Region is not online: " + encodedRegionName); + } + return region; + default: + throw new DoNotRetryIOException( + "Unsupported region specifier type: " + type); + } + } + + /** + * Execute an append mutation. 
+ * + * @param region + * @param mutate + * @return + * @throws IOException + */ + protected Result append(final HRegion region, + final Mutate mutate) throws IOException { + Append append = ProtobufUtil.toAppend(mutate); + Result r = null; + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().preAppend(append); + } + if (r == null) { + Integer lock = getLockFromId(append.getLockId()); + r = region.append(append, lock, append.getWriteToWAL()); + if (region.getCoprocessorHost() != null) { + region.getCoprocessorHost().postAppend(append, r); + } + } + return r; + } + + /** + * Execute an increment mutation. + * + * @param region + * @param mutate + * @return + * @throws IOException + */ + protected Result increment(final HRegion region, + final Mutate mutate) throws IOException { + Increment increment = ProtobufUtil.toIncrement(mutate); + Result r = null; + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().preIncrement(increment); + } + if (r == null) { + Integer lock = getLockFromId(increment.getLockId()); + r = region.increment(increment, lock, increment.getWriteToWAL()); + if (region.getCoprocessorHost() != null) { + r = region.getCoprocessorHost().postIncrement(increment, r); + } + } + return r; + } + + /** + * Execute a list of put mutations. 
+ * + * @param builder + * @param region + * @param puts + */ + protected void put(final MultiResponse.Builder builder, + final HRegion region, final List puts) { + @SuppressWarnings("unchecked") + Pair[] putsWithLocks = new Pair[puts.size()]; + + try { + ActionResult.Builder resultBuilder = ActionResult.newBuilder(); + NameBytesPair value = ProtobufUtil.toParameter(new Result()); + resultBuilder.setValue(value); + ActionResult result = resultBuilder.build(); + + int i = 0; + for (Mutate put : puts) { + Put p = ProtobufUtil.toPut(put); + Integer lock = getLockFromId(p.getLockId()); + putsWithLocks[i++] = new Pair(p, lock); + builder.addResult(result); + } + + requestCount.addAndGet(puts.size()); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + + OperationStatus codes[] = region.put(putsWithLocks); + for (i = 0; i < codes.length; i++) { + if (codes[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) { + result = ResponseConverter.buildActionResult( + new DoNotRetryIOException(codes[i].getExceptionMsg())); + builder.setResult(i, result); + } + } + } catch (IOException ie) { + ActionResult result = ResponseConverter.buildActionResult(ie); + for (int i = 0, n = puts.size(); i < n; i++) { + builder.setResult(i, result); + } + } + } + + /** + * Mutate a list of rows atomically. 
+ * + * @param region + * @param mutates + * @throws IOException + */ + protected void mutateRows(final HRegion region, + final List mutates) throws IOException { + Mutate firstMutate = mutates.get(0); + if (!region.getRegionInfo().isMetaTable()) { + cacheFlusher.reclaimMemStoreMemory(); + } + byte[] row = firstMutate.getRow().toByteArray(); + RowMutations rm = new RowMutations(row); + for (Mutate mutate: mutates) { + MutateType type = mutate.getMutateType(); + switch (mutate.getMutateType()) { + case PUT: + rm.add(ProtobufUtil.toPut(mutate)); + break; + case DELETE: + rm.add(ProtobufUtil.toDelete(mutate)); + break; + default: + throw new DoNotRetryIOException( + "mutate supports atomic put and/or delete, not " + + type.name()); + } + } + region.mutateRow(rm); + } +} diff --git src/main/protobuf/Admin.proto src/main/protobuf/Admin.proto new file mode 100644 index 0000000..132c5dd --- /dev/null +++ src/main/protobuf/Admin.proto @@ -0,0 +1,236 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for Admin service. 
+ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "AdminProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +message GetRegionInfoRequest { + required RegionSpecifier region = 1; +} + +message GetRegionInfoResponse { + required RegionInfo regionInfo = 1; +} + +/** + * Get a list of store files for a set of column families in a particular region. + * If no column family is specified, get the store files for all column families. + */ +message GetStoreFileListRequest { + required RegionSpecifier region = 1; + repeated bytes columnFamily = 2; +} + +message GetStoreFileListResponse { + repeated string storeFile = 1; +} + +message GetOnlineRegionRequest { +} + +message GetOnlineRegionResponse { + repeated RegionInfo regionInfo = 1; +} + +message OpenRegionRequest { + repeated RegionSpecifier region = 1; + optional uint32 versionOfOfflineNode = 2; +} + +message OpenRegionResponse { + repeated RegionOpeningState openingState = 1; + + enum RegionOpeningState { + OPENED = 0; + ALREADY_OPENED = 1; + FAILED_OPENING = 2; + } +} + +/** + * Closes the specified region and will use or not use ZK during the close + * according to the specified flag. + */ +message CloseRegionRequest { + required RegionSpecifier region = 1; + optional uint32 versionOfClosingNode = 2; + optional bool transitionInZK = 3 [default = true]; +} + +message CloseRegionResponse { + required bool closed = 1; +} + +/** + * Flushes the MemStore of the specified region. + *

+ * This method is synchronous. + */ +message FlushRegionRequest { + required RegionSpecifier region = 1; + optional uint64 ifOlderThanTs = 2; +} + +message FlushRegionResponse { + required uint64 lastFlushTime = 1; + optional bool flushed = 2; +} + +/** + * Splits the specified region. + *

+ * This method currently flushes the region and then forces a compaction which + * will then trigger a split. The flush is done synchronously but the + * compaction is asynchronous. + */ +message SplitRegionRequest { + required RegionSpecifier region = 1; + optional bytes splitPoint = 2; +} + +message SplitRegionResponse { +} + +/** + * Compacts the specified region. Performs a major compaction if specified. + *

+ * This method is asynchronous. + */ +message CompactRegionRequest { + required RegionSpecifier region = 1; + optional bool major = 2; +} + +message CompactRegionResponse { +} + +message UUID { + required uint64 leastSigBits = 1; + required uint64 mostSigBits = 2; +} + +// Protocol buffer version of HLog +message WALEntry { + required WALKey walKey = 1; + required WALEdit edit = 2; + + // Protocol buffer version of HLogKey + message WALKey { + required bytes encodedRegionName = 1; + required bytes tableName = 2; + required uint64 logSequenceNumber = 3; + required uint64 writeTime = 4; + optional UUID clusterId = 5; + } + + message WALEdit { + repeated bytes keyValue = 1; + repeated FamilyScope familyScope = 2; + + enum ScopeType { + REPLICATION_SCOPE_LOCAL = 0; + REPLICATION_SCOPE_GLOBAL = 1; + } + + message FamilyScope { + required bytes family = 1; + required ScopeType scopeType = 2; + } + } +} + +/** + * Replicates the given entries. The guarantee is that the given entries + * will be durable on the slave cluster if this method returns without + * any exception. + * hbase.replication has to be set to true for this to work. 
+ */ +message ReplicateWALEntryRequest { + repeated WALEntry walEntry = 1; +} + +message ReplicateWALEntryResponse { +} + +// Replacement for rollHLogWriter in HRegionInterface +message RollWALWriterRequest { +} + +message RollWALWriterResponse { + // A list of encoded name of regions to flush + repeated bytes regionToFlush = 1; +} + +message StopServerRequest { + required string reason = 1; +} + +message StopServerResponse { +} + +message GetServerInfoRequest { +} + +message GetServerInfoResponse { + required ServerName serverName = 1; +} + +service AdminService { + rpc getRegionInfo(GetRegionInfoRequest) + returns(GetRegionInfoResponse); + + rpc getStoreFileList(GetStoreFileListRequest) + returns(GetStoreFileListResponse); + + rpc getOnlineRegion(GetOnlineRegionRequest) + returns(GetOnlineRegionResponse); + + rpc openRegion(OpenRegionRequest) + returns(OpenRegionResponse); + + rpc closeRegion(CloseRegionRequest) + returns(CloseRegionResponse); + + rpc flushRegion(FlushRegionRequest) + returns(FlushRegionResponse); + + rpc splitRegion(SplitRegionRequest) + returns(SplitRegionResponse); + + rpc compactRegion(CompactRegionRequest) + returns(CompactRegionResponse); + + rpc replicateWALEntry(ReplicateWALEntryRequest) + returns(ReplicateWALEntryResponse); + + rpc rollWALWriter(RollWALWriterRequest) + returns(RollWALWriterResponse); + + rpc getServerInfo(GetServerInfoRequest) + returns(GetServerInfoResponse); + + rpc stopServer(StopServerRequest) + returns(StopServerResponse); +} diff --git src/main/protobuf/Client.proto src/main/protobuf/Client.proto new file mode 100644 index 0000000..a7a19e0 --- /dev/null +++ src/main/protobuf/Client.proto @@ -0,0 +1,356 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for Client service. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "ClientProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +/** + * Container for a list of column qualifier names of a family. + */ +message Column { + required bytes family = 1; + repeated bytes qualifier = 2; +} + +/** + * The protocol buffer version of Get + */ +message Get { + required bytes row = 1; + repeated Column column = 2; + repeated NameBytesPair attribute = 3; + optional uint64 lockId = 4; + optional NameBytesPair filter = 5; + optional TimeRange timeRange = 6; + optional uint32 maxVersions = 7 [default = 1]; + optional bool cacheBlocks = 8 [default = true]; +} + +/** + * For performance reason, we don't use KeyValue + * here. We use the actual KeyValue bytes. + */ +message Result { + repeated bytes keyValueBytes = 1; +} + +/** + * The get request. Perform a single Get operation. + * Unless existenceOnly is specified, return all the requested data + * for the row that matches exactly, or the one that immediately + * precedes it if closestRowBefore is specified. + * + * If existenceOnly is set, only the existence will be returned. 
+ */ +message GetRequest { + required RegionSpecifier region = 1; + required Get get = 2; + + // If the row to get doesn't exist, return the + // closest row before. + optional bool closestRowBefore = 3; + + // The result isn't asked for, just check for + // the existence. If specified, closestRowBefore + // will be ignored + optional bool existenceOnly = 4; +} + +message GetResponse { + optional Result result = 1; + + // used for Get to check existence only + optional bool exists = 2; +} + +/** + * Condition to check if the value of a given cell (row, + * family, qualifier) matches a value via a given comparator. + * + * Condition is used in check and mutate operations. + */ +message Condition { + required bytes row = 1; + required bytes family = 2; + required bytes qualifier = 3; + required CompareType compareType = 4; + required NameBytesPair comparator = 5; + + enum CompareType { + LESS = 0; + LESS_OR_EQUAL = 1; + EQUAL = 2; + NOT_EQUAL = 3; + GREATER_OR_EQUAL = 4; + GREATER = 5; + NO_OP = 6; + } +} + +/** + * A specific mutate inside a mutate request. + * It can be an append, increment, put or delete based + * on the mutate type. 
+ */ +message Mutate { + required bytes row = 1; + required MutateType mutateType = 2; + repeated ColumnValue columnValue = 3; + repeated NameBytesPair attribute = 4; + optional uint64 timestamp = 5; + optional uint64 lockId = 6; + optional bool writeToWAL = 7 [default = true]; + + // For some mutate, result may be returned, in which case, + // time range can be specified for potential performance gain + optional TimeRange timeRange = 10; + + enum MutateType { + APPEND = 0; + INCREMENT = 1; + PUT = 2; + DELETE = 3; + } + + enum DeleteType { + DELETE_ONE_VERSION = 0; + DELETE_MULTIPLE_VERSIONS = 1; + DELETE_FAMILY = 2; + } + + message ColumnValue { + required bytes family = 1; + repeated QualifierValue qualifierValue = 2; + + message QualifierValue { + optional bytes qualifier = 1; + optional bytes value = 2; + optional uint64 timestamp = 3; + optional DeleteType deleteType = 4; + } + } +} + +/** + * The mutate request. Perform a single Mutate operation. + * + * Optionally, you can specify a condition. The mutate + * will take place only if the condition is met. Otherwise, + * the mutate will be ignored. In the response result, + * parameter processed is used to indicate if the mutate + * actually happened. + */ +message MutateRequest { + required RegionSpecifier region = 1; + required Mutate mutate = 2; + optional Condition condition = 3; +} + +message MutateResponse { + optional Result result = 1; + + // used for mutate to indicate processed only + optional bool processed = 2; +} + +/** + * Instead of get from a table, you can scan it with optional filters. + * You can specify the row key range, time range, the columns/families + * to scan and so on. + * + * This scan is used the first time in a scan request. The response of + * the initial scan will return a scanner id, which should be used to + * fetch result batches later on before it is closed. 
+ */ +message Scan { + repeated Column column = 1; + repeated NameBytesPair attribute = 2; + optional bytes startRow = 3; + optional bytes stopRow = 4; + optional NameBytesPair filter = 5; + optional TimeRange timeRange = 6; + optional uint32 maxVersions = 7 [default = 1]; + optional bool cacheBlocks = 8 [default = true]; + optional uint32 batchSize = 9; +} + +/** + * A scan request. Initially, it should specify a scan. Later on, you + * can use the scanner id returned to fetch result batches with a different + * scan request. + * + * The scanner will remain open if there are more results, and it's not + * asked to be closed explicitly. + * + * You can fetch the results and ask the scanner to be closed to save + * a trip if you are not interested in remaining results. + */ +message ScanRequest { + optional RegionSpecifier region = 1; + optional Scan scan = 2; + optional uint64 scannerId = 3; + optional uint32 numberOfRows = 4; + optional bool closeScanner = 5; +} + +/** + * The scan response. If there are no more results, moreResults will + * be false. If it is not specified, it means there are more. + */ +message ScanResponse { + repeated Result result = 1; + optional uint64 scannerId = 2; + optional bool moreResults = 3; + optional uint32 ttl = 4; +} + +message LockRowRequest { + required RegionSpecifier region = 1; + repeated bytes row = 2; +} + +message LockRowResponse { + required uint64 lockId = 1; + optional uint32 ttl = 2; +} + +message UnlockRowRequest { + required RegionSpecifier region = 1; + required uint64 lockId = 2; +} + +message UnlockRowResponse { +} + +/** + * Atomically bulk load multiple HFiles (say from different column families) + * into an open region. 
+ */ +message BulkLoadHFileRequest { + required RegionSpecifier region = 1; + repeated FamilyPath familyPath = 2; + + message FamilyPath { + required bytes family = 1; + required string path = 2; + } +} + +message BulkLoadHFileResponse { + required bool loaded = 1; +} + +/** + * An individual coprocessor call. You must specify the protocol, + * the method, and the row to which the call will be executed. + * + * You can specify the configuration settings in the property list. + * + * The parameter list has the parameters used for the method. + * A parameter is a pair of parameter name and the binary parameter + * value. The name is the parameter class name. The value is the + * binary format of the parameter, for example, protocol buffer + * encoded value. + */ +message Exec { + required bytes row = 1; + required string protocolName = 2; + required string methodName = 3; + repeated NameStringPair property = 4; + repeated NameBytesPair parameter = 5; +} + + /** + * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol} + * method using the registered protocol handlers. + * {@link CoprocessorProtocol} implementations must be registered via the + * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol( + * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} + * method before they are available. + */ +message ExecCoprocessorRequest { + required RegionSpecifier region = 1; + required Exec call = 2; +} + +message ExecCoprocessorResponse { + required NameBytesPair value = 1; +} + +/** + * An individual action result. The result will in the + * same order as the action in the request. If an action + * returns a value, it is set in value field. If it doesn't + * return anything, the result will be empty. If an action + * fails to execute due to any exception, the exception + * is returned as a stringified parameter. 
+ */ +message ActionResult { + optional NameBytesPair value = 1; + optional NameBytesPair exception = 2; +} + +/** + * You can execute a list of actions on a given region in order. + * + * If it is a list of mutate actions, atomic can be set + * to make sure they can be processed atomically, just like + * RowMutations. + */ +message MultiRequest { + required RegionSpecifier region = 1; + repeated NameBytesPair action = 2; + optional bool atomic = 3; +} + +message MultiResponse { + repeated ActionResult result = 1; +} + +service ClientService { + rpc get(GetRequest) + returns(GetResponse); + + rpc mutate(MutateRequest) + returns(MutateResponse); + + rpc scan(ScanRequest) + returns(ScanResponse); + + rpc lockRow(LockRowRequest) + returns(LockRowResponse); + + rpc unlockRow(UnlockRowRequest) + returns(UnlockRowResponse); + + rpc bulkLoadHFile(BulkLoadHFileRequest) + returns(BulkLoadHFileResponse); + + rpc execCoprocessor(ExecCoprocessorRequest) + returns(ExecCoprocessorResponse); + + rpc multi(MultiRequest) + returns(MultiResponse); +} diff --git src/main/protobuf/RegionAdmin.proto src/main/protobuf/RegionAdmin.proto deleted file mode 100644 index c64d68b..0000000 --- src/main/protobuf/RegionAdmin.proto +++ /dev/null @@ -1,236 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for RegionAdmin service. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RegionAdminProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; - -message GetRegionInfoRequest { - required RegionSpecifier region = 1; -} - -message GetRegionInfoResponse { - required RegionInfo regionInfo = 1; -} - -/** - * Get a list of store files for a set of column families in a particular region. - * If no column family is specified, get the store files for all column families. - */ -message GetStoreFileListRequest { - required RegionSpecifier region = 1; - repeated bytes columnFamily = 2; -} - -message GetStoreFileListResponse { - repeated string storeFile = 1; -} - -message GetOnlineRegionRequest { -} - -message GetOnlineRegionResponse { - repeated RegionInfo regionInfo = 1; -} - -message OpenRegionRequest { - repeated RegionSpecifier region = 1; - optional uint32 versionOfOfflineNode = 2; -} - -message OpenRegionResponse { - repeated RegionOpeningState openingState = 1; - - enum RegionOpeningState { - OPENED = 0; - ALREADY_OPENED = 1; - FAILED_OPENING = 2; - } -} - -/** - * Closes the specified region and will use or not use ZK during the close - * according to the specified flag. - */ -message CloseRegionRequest { - required RegionSpecifier region = 1; - optional uint32 versionOfClosingNode = 2; - optional bool transitionInZK = 3 [default = true]; -} - -message CloseRegionResponse { - required bool closed = 1; -} - -/** - * Flushes the MemStore of the specified region. - *

- * This method is synchronous. - */ -message FlushRegionRequest { - required RegionSpecifier region = 1; - optional uint64 ifOlderThanTs = 2; -} - -message FlushRegionResponse { - required uint64 lastFlushTime = 1; - optional bool flushed = 2; -} - -/** - * Splits the specified region. - *

- * This method currently flushes the region and then forces a compaction which - * will then trigger a split. The flush is done synchronously but the - * compaction is asynchronous. - */ -message SplitRegionRequest { - required RegionSpecifier region = 1; - optional bytes splitPoint = 2; -} - -message SplitRegionResponse { -} - -/** - * Compacts the specified region. Performs a major compaction if specified. - *

- * This method is asynchronous. - */ -message CompactRegionRequest { - required RegionSpecifier region = 1; - optional bool major = 2; -} - -message CompactRegionResponse { -} - -message UUID { - required uint64 leastSigBits = 1; - required uint64 mostSigBits = 2; -} - -// Protocol buffer version of HLog -message WALEntry { - required WALKey walKey = 1; - required WALEdit edit = 2; - - // Protocol buffer version of HLogKey - message WALKey { - required bytes encodedRegionName = 1; - required bytes tableName = 2; - required uint64 logSequenceNumber = 3; - required uint64 writeTime = 4; - optional UUID clusterId = 5; - } - - message WALEdit { - repeated KeyValue keyValue = 1; - repeated FamilyScope familyScope = 2; - - enum ScopeType { - REPLICATION_SCOPE_LOCAL = 0; - REPLICATION_SCOPE_GLOBAL = 1; - } - - message FamilyScope { - required bytes family = 1; - required ScopeType scopeType = 2; - } - } -} - -/** - * Replicates the given entries. The guarantee is that the given entries - * will be durable on the slave cluster if this method returns without - * any exception. - * hbase.replication has to be set to true for this to work. 
- */ -message ReplicateWALEntryRequest { - repeated WALEntry walEntry = 1; -} - -message ReplicateWALEntryResponse { -} - -// Replacement for rollHLogWriter in HRegionInterface -message RollWALWriterRequest { -} - -message RollWALWriterResponse { - // A list of encoded name of regions to flush - repeated bytes regionToFlush = 1; -} - -message StopServerRequest { - required string reason = 1; -} - -message StopServerResponse { -} - -message GetServerInfoRequest { -} - -message GetServerInfoResponse { - required ServerName serverName = 1; -} - -service RegionAdminService { - rpc getRegionInfo(GetRegionInfoRequest) - returns(GetRegionInfoResponse); - - rpc getStoreFileList(GetStoreFileListRequest) - returns(GetStoreFileListResponse); - - rpc getOnlineRegion(GetOnlineRegionRequest) - returns(GetOnlineRegionResponse); - - rpc openRegion(OpenRegionRequest) - returns(OpenRegionResponse); - - rpc closeRegion(CloseRegionRequest) - returns(CloseRegionResponse); - - rpc flushRegion(FlushRegionRequest) - returns(FlushRegionResponse); - - rpc splitRegion(SplitRegionRequest) - returns(SplitRegionResponse); - - rpc compactRegion(CompactRegionRequest) - returns(CompactRegionResponse); - - rpc replicateWALEntry(ReplicateWALEntryRequest) - returns(ReplicateWALEntryResponse); - - rpc rollWALWriter(RollWALWriterRequest) - returns(RollWALWriterResponse); - - rpc getServerInfo(GetServerInfoRequest) - returns(GetServerInfoResponse); - - rpc stopServer(StopServerRequest) - returns(StopServerResponse); -} diff --git src/main/protobuf/RegionClient.proto src/main/protobuf/RegionClient.proto deleted file mode 100644 index 358382b..0000000 --- src/main/protobuf/RegionClient.proto +++ /dev/null @@ -1,372 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are used for RegionClient service. - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "RegionClientProtos"; -option java_generic_services = true; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "hbase.proto"; - -/** - * Container for a list of column qualifier names of a family. - */ -message Column { - required bytes family = 1; - repeated bytes qualifier = 2; -} - -message Attribute { - required string name = 1; - optional bytes value = 2; -} - -/** - * The protocol buffer version of Get - */ -message Get { - required bytes row = 1; - repeated Column column = 2; - repeated Attribute attribute = 3; - optional uint64 lockId = 4; - optional Parameter filter = 5; - optional TimeRange timeRange = 6; - optional uint32 maxVersions = 7 [default = 1]; - optional bool cacheBlocks = 8 [default = true]; -} - -message Result { - repeated KeyValue value = 1; -} - -/** - * The get request. Perform a single Get operation. - * Unless existenceOnly is specified, return all the requested data - * for the row that matches exactly, or the one that immediately - * precedes it if closestRowBefore is specified. - * - * If existenceOnly is set, only the existence will be returned. 
- */ -message GetRequest { - required RegionSpecifier region = 1; - required Get get = 2; - - // If the row to get doesn't exist, return the - // closest row before. - optional bool closestRowBefore = 3; - - // The result isn't asked for, just check for - // the existence. If specified, closestRowBefore - // will be ignored - optional bool existenceOnly = 4; -} - -message GetResponse { - optional Result result = 1; - - // used for Get to check existence only - optional bool exists = 2; -} - -/** - * Condition to check if the value of a given cell (row, - * family, qualifier) matches a value via a given comparator. - * The value is optional since some comparator may not require - * a value to compare, for example, checking null. - * - * Condition is used in check and mutate operations. - */ -message Condition { - required bytes row = 1; - required bytes family = 2; - required bytes qualifier = 3; - required CompareType compareType = 4; - required Comparator comparator = 5; - optional bytes value = 6; - - enum CompareType { - LESS = 0; - LESS_OR_EQUAL = 1; - EQUAL = 2; - NOT_EQUAL = 3; - GREATER_OR_EQUAL = 4; - GREATER = 5; - NO_OP = 6; - } - - enum Comparator { - BINARY_COMPARATOR = 0; - BINARY_PREFIX_COMPARATOR = 1; - BIT_AND_COMPARATOR = 2; - BIT_OR_COMPARATOR = 3; - BIT_XOR_COMPARATOR = 4; - NULL_COMPARATOR = 5; - REGEX_STRING_COMPARATOR = 6; - SUBSTRING_COMPARATOR = 7; - } -} - -/** - * A specific mutate inside a mutate request. - * It can be an append, increment, put or delete based - * on the mutate type. 
- */ -message Mutate { - required bytes row = 1; - required MutateType mutateType = 2; - repeated ColumnValue columnValue = 3; - repeated Attribute attribute = 4; - optional uint64 timestamp = 5; - optional uint64 lockId = 6; - optional bool writeToWAL = 7 [default = true]; - - // For some mutate, result may be returned, in which case, - // time range can be specified for potential performance gain - optional TimeRange timeRange = 10; - - enum MutateType { - APPEND = 0; - INCREMENT = 1; - PUT = 2; - DELETE = 3; - DELETE_COLUMN = 4; - DELETE_FAMILY = 5; - } - - message ColumnValue { - required bytes family = 1; - repeated QualifierValue qualifierValue = 2; - - // Default timestamp for qalifier values, - // or timestamp of the column family to be deleted - optional uint64 timestamp = 3; - - message QualifierValue { - required bytes qualifier = 1; - optional bytes value = 2; - optional uint64 timestamp = 3; - } - } -} - -/** - * The mutate request. Perform a single Mutate operation. - * - * Optionally, you can specify a condition. The mutate - * will take place only if the condition is met. Otherwise, - * the mutate will be ignored. In the response result, - * parameter processed is used to indicate if the mutate - * actually happened. - */ -message MutateRequest { - required RegionSpecifier region = 1; - required Mutate mutate = 2; - optional Condition condition = 3; -} - -message MutateResponse { - optional Result result = 1; - - // used for mutate to indicate processed only - optional bool processed = 2; -} - -/** - * Instead of get from a table, you can scan it with optional filters. - * You can specify the row key range, time range, the columns/families - * to scan and so on. - * - * This scan is used the first time in a scan request. The response of - * the initial scan will return a scanner id, which should be used to - * fetch result batches later on before it is closed. 
- */ -message Scan { - repeated Column column = 1; - repeated Attribute attribute = 2; - optional bytes startRow = 3; - optional bytes stopRow = 4; - optional Parameter filter = 5; - optional TimeRange timeRange = 6; - optional uint32 maxVersions = 7 [default = 1]; - optional bool cacheBlocks = 8 [default = true]; - optional uint32 rowsToCache = 9; - optional uint32 batchSize = 10; -} - -/** - * A scan request. Initially, it should specify a scan. Later on, you - * can use the scanner id returned to fetch result batches with a different - * scan request. - * - * The scanner will remain open if there are more results, and it's not - * asked to be closed explicitly. - * - * You can fetch the results and ask the scanner to be closed to save - * a trip if you are not interested in remaining results. - */ -message ScanRequest { - optional uint64 scannerId = 1; - optional Scan scan = 2; - optional uint32 numberOfRows = 3; - optional bool closeScanner = 4; -} - -/** - * The scan response. If there are no more results, moreResults will - * be false. If it is not specified, it means there are more. - */ -message ScanResponse { - repeated Result result = 1; - optional uint64 scannerId = 2; - optional bool moreResults = 3; - optional uint32 ttl = 4; -} - -message LockRowRequest { - required RegionSpecifier region = 1; - repeated bytes row = 2; -} - -message LockRowResponse { - required uint64 lockId = 1; - optional uint32 ttl = 2; -} - -message UnlockRowRequest { - required RegionSpecifier region = 1; - required uint64 lockId = 2; -} - -message UnlockRowResponse { -} - -/** - * Atomically bulk load multiple HFiles (say from different column families) - * into an open region. 
- */ -message BulkLoadHFileRequest { - required RegionSpecifier region = 1; - repeated FamilyPath familyPath = 2; - - message FamilyPath { - required bytes family = 1; - required string path = 2; - } -} - -message BulkLoadHFileResponse { - required bool loaded = 1; -} - -message Parameter { - required string type = 1; - optional bytes binaryValue = 2; -} - -message Property { - required string name = 1; - required string value = 2; -} - -/** - * An individual coprocessor call. You must specify the protocol, - * the method, and the row to which the call will be executed. - * - * You can specify the configuration settings in the property list. - * - * The parameter list has the parameters used for the method. - * A parameter is a pair of parameter name and the binary parameter - * value. The name is the parameter class name. The value is the - * binary format of the parameter, for example, protocol buffer - * encoded value. - */ -message Exec { - required bytes row = 1; - required string protocolName = 2; - required string methodName = 3; - repeated Property property = 4; - repeated Parameter parameter = 5; -} - - /** - * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol} - * method using the registered protocol handlers. - * {@link CoprocessorProtocol} implementations must be registered via the - * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol( - * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} - * method before they are available. - */ -message ExecCoprocessorRequest { - required RegionSpecifier region = 1; - required Exec call = 2; -} - -message ExecCoprocessorResponse { - required bytes regionName = 1; - required Parameter value = 2; -} - -/** - * You can execute a list of actions on regions assigned - * to the same region server, if you can't find an individual - * call which meets your requirement. - * - * The multi request can have a list of requests. 
Each request - * should be a protocol buffer encoded request such as GetRequest, - * MutateRequest, ExecCoprocessorRequest. - * - * If the list contains multiple mutate requests only, atomic can - * be set to make sure they can be processed atomically. - */ -message MultiRequest { - repeated Parameter request = 1; - optional bool atomic = 2; -} - -message MultiResponse { - repeated Parameter response = 1; -} - -service RegionClientService { - rpc get(GetRequest) - returns(GetResponse); - - rpc mutate(MutateRequest) - returns(MutateResponse); - - rpc scan(ScanRequest) - returns(ScanResponse); - - rpc lockRow(LockRowRequest) - returns(LockRowResponse); - - rpc unlockRow(UnlockRowRequest) - returns(UnlockRowResponse); - - rpc bulkLoadHFile(BulkLoadHFileRequest) - returns(BulkLoadHFileResponse); - - rpc execCoprocessor(ExecCoprocessorRequest) - returns(ExecCoprocessorResponse); - - rpc multi(MultiRequest) - returns(MultiResponse); -} diff --git src/main/protobuf/hbase.proto src/main/protobuf/hbase.proto index da78788..12e6053 100644 --- src/main/protobuf/hbase.proto +++ src/main/protobuf/hbase.proto @@ -101,3 +101,16 @@ message ServerName { optional uint32 port = 2; optional uint64 startCode = 3; } + +// Comment data structures + +message NameStringPair { + required string name = 1; + required string value = 2; +} + +message NameBytesPair { + required string name = 1; + optional bytes value = 2; +} + diff --git src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java index c7284dc..d6ae0e2 100644 --- src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java +++ src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java @@ -41,6 +41,9 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.RetriesExhaustedException; import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import 
org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Writables; @@ -58,6 +61,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test {@link CatalogTracker} */ @@ -131,13 +137,16 @@ public class TestCatalogTracker { /** * Test interruptable while blocking wait on root and meta. * @throws IOException + * @throws ServiceException * @throws InterruptedException */ @Test public void testInterruptWaitOnMetaAndRoot() - throws IOException, InterruptedException { - HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + throws IOException, InterruptedException, ServiceException { + final ClientProtocol client = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(null, client); try { + Mockito.when(client.get((RpcController)Mockito.any(), (GetRequest)Mockito.any())). + thenReturn(GetResponse.newBuilder().build()); final CatalogTracker ct = constructAndStartCatalogTracker(connection); ServerName hsa = ct.getRootLocation(); Assert.assertNull(hsa); @@ -176,10 +185,11 @@ public class TestCatalogTracker { */ @Test public void testServerNotRunningIOException() - throws IOException, InterruptedException, KeeperException { + throws IOException, InterruptedException, KeeperException, ServiceException { // Mock an HRegionInterface. 
final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + final ClientProtocol client = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(implementation, client); try { // If a 'getRegionInfo' is called on mocked HRegionInterface, throw IOE // the first time. 'Succeed' the second time we are called. @@ -198,6 +208,8 @@ public class TestCatalogTracker { Mockito.when(connection.getRegionServerWithRetries((ServerCallable)Mockito.any())). thenReturn(getMetaTableRowResult()); + Mockito.when(client.get((RpcController)Mockito.any(), (GetRequest)Mockito.any())). + thenReturn(GetResponse.newBuilder().build()); // Now start up the catalogtracker with our doctored Connection. final CatalogTracker ct = constructAndStartCatalogTracker(connection); try { @@ -245,17 +257,18 @@ public class TestCatalogTracker { * @throws IOException * @throws InterruptedException * @throws KeeperException + * @throws ServiceException */ @Test public void testGetMetaServerConnectionFails() - throws IOException, InterruptedException, KeeperException { - // Mock an HRegionInterface. - final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + throws IOException, InterruptedException, KeeperException, ServiceException { + // Mock an ClientProtocol. + final ClientProtocol implementation = Mockito.mock(ClientProtocol.class); + HConnection connection = mockConnection(null, implementation); try { // If a 'get' is called on mocked interface, throw connection refused. - Mockito.when(implementation.get((byte[]) Mockito.any(), (Get) Mockito.any())). - thenThrow(new ConnectException("Connection refused")); + Mockito.when(implementation.get((RpcController) Mockito.any(), (GetRequest) Mockito.any())). 
+ thenThrow(new ServiceException(new ConnectException("Connection refused"))); // Now start up the catalogtracker with our doctored Connection. final CatalogTracker ct = constructAndStartCatalogTracker(connection); try { @@ -371,7 +384,7 @@ public class TestCatalogTracker { // to make our test work. // Mock an HRegionInterface. final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - HConnection connection = mockConnection(implementation); + HConnection connection = mockConnection(implementation, null); try { // Now the ct is up... set into the mocks some answers that make it look // like things have been getting assigned. Make it so we'll return a @@ -419,6 +432,7 @@ public class TestCatalogTracker { /** * @param implementation An {@link HRegionInterface} instance; you'll likely * want to pass a mocked HRS; can be null. + * @param client A mocked ClientProtocol instance, can be null * @return Mock up a connection that returns a {@link Configuration} when * {@link HConnection#getConfiguration()} is called, a 'location' when * {@link HConnection#getRegionLocation(byte[], byte[], boolean)} is called, @@ -429,7 +443,8 @@ public class TestCatalogTracker { * when done with this mocked Connection. * @throws IOException */ - private HConnection mockConnection(final HRegionInterface implementation) + private HConnection mockConnection( + final HRegionInterface implementation, final ClientProtocol client) throws IOException { HConnection connection = HConnectionTestingUtility.getMockedConnection(UTIL.getConfiguration()); @@ -449,6 +464,11 @@ public class TestCatalogTracker { Mockito.when(connection.getHRegionConnection(Mockito.anyString(), Mockito.anyInt())). thenReturn(implementation); } + if (client != null) { + // If a call to getClient, return this implementation. + Mockito.when(connection.getClient(Mockito.anyString(), Mockito.anyInt())). 
+ thenReturn(client); + } return connection; } diff --git src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java index 0042468..3cfc02b 100644 --- src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java +++ src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java @@ -31,8 +31,10 @@ import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -42,6 +44,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test MetaReader/Editor but without spinning up a cluster. * We mock regionserver back and forth (we do spin up a zk cluster). @@ -82,7 +87,8 @@ public class TestMetaReaderEditorNoCluster { * @throws InterruptedException */ @Test - public void testRideOverServerNotRunning() throws IOException, InterruptedException { + public void testRideOverServerNotRunning() + throws IOException, InterruptedException, ServiceException { // Need a zk watcher. 
ZooKeeperWatcher zkw = new ZooKeeperWatcher(UTIL.getConfiguration(), this.getClass().getSimpleName(), ABORTABLE, true); @@ -92,27 +98,16 @@ public class TestMetaReaderEditorNoCluster { HConnection connection = null; CatalogTracker ct = null; try { - // Mock an HRegionInterface. Our mock implementation will fail a few + // Mock an ClientProtocol. Our mock implementation will fail a few // times when we go to open a scanner. - final HRegionInterface implementation = Mockito.mock(HRegionInterface.class); - // When openScanner called throw IOE 'Server not running' a few times + final ClientProtocol implementation = Mockito.mock(ClientProtocol.class); + // When scan called throw IOE 'Server not running' a few times // before we return a scanner id. Whats WEIRD is that these // exceptions do not show in the log because they are caught and only // printed if we FAIL. We eventually succeed after retry so these don't // show. We will know if they happened or not because we will ask - // mockito at the end of this test to verify that openscanner was indeed + // mockito at the end of this test to verify that scan was indeed // called the wanted number of times. - final long scannerid = 123L; - Mockito.when(implementation.openScanner((byte [])Mockito.any(), - (Scan)Mockito.any())). - thenThrow(new IOException("Server not running (1 of 3)")). - thenThrow(new IOException("Server not running (2 of 3)")). - thenThrow(new IOException("Server not running (3 of 3)")). - thenReturn(scannerid); - // Make it so a verifiable answer comes back when next is called. Return - // the verifiable answer and then a null so we stop scanning. Our - // verifiable answer is something that looks like a row in META with - // a server and startcode that is that of the above defined servername. 
List kvs = new ArrayList(); final byte [] rowToVerify = Bytes.toBytes("rowToVerify"); kvs.add(new KeyValue(rowToVerify, @@ -124,10 +119,19 @@ public class TestMetaReaderEditorNoCluster { kvs.add(new KeyValue(rowToVerify, HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn.getStartcode()))); - final Result [] result = new Result [] {new Result(kvs)}; - Mockito.when(implementation.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(result). - thenReturn(null); + final Result [] results = new Result [] {new Result(kvs)}; + ScanResponse.Builder builder = ScanResponse.newBuilder(); + for (Result result: results) { + builder.addResult(ProtobufUtil.toResult(result)); + } + Mockito.when(implementation.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenThrow(new ServiceException("Server not running (1 of 3)")). + thenThrow(new ServiceException("Server not running (2 of 3)")). + thenThrow(new ServiceException("Server not running (3 of 3)")). + thenReturn(ScanResponse.newBuilder().setScannerId(1234567890L).build()) + .thenReturn(builder.build()).thenReturn( + ScanResponse.newBuilder().setMoreResults(false).build()); // Associate a spied-upon HConnection with UTIL.getConfiguration. Need // to shove this in here first so it gets picked up all over; e.g. by @@ -150,7 +154,7 @@ public class TestMetaReaderEditorNoCluster { // Now shove our HRI implementation into the spied-upon connection. Mockito.doReturn(implementation). - when(connection).getHRegionConnection(Mockito.anyString(), Mockito.anyInt()); + when(connection).getClient(Mockito.anyString(), Mockito.anyInt()); // Now start up the catalogtracker with our doctored Connection. 
ct = new CatalogTracker(zkw, null, connection, ABORTABLE, 0); @@ -160,10 +164,10 @@ public class TestMetaReaderEditorNoCluster { assertTrue(hris.size() == 1); assertTrue(hris.firstEntry().getKey().equals(HRegionInfo.FIRST_META_REGIONINFO)); assertTrue(Bytes.equals(rowToVerify, hris.firstEntry().getValue().getRow())); - // Finally verify that openscanner was called four times -- three times - // with exception and then on 4th attempt we succeed. - Mockito.verify(implementation, Mockito.times(4)). - openScanner((byte [])Mockito.any(), (Scan)Mockito.any()); + // Finally verify that scan was called four times -- three times + // with exception and then on 4th, 5th and 6th attempt we succeed + Mockito.verify(implementation, Mockito.times(6)). + scan((RpcController)Mockito.any(), (ScanRequest)Mockito.any()); } finally { if (ct != null) ct.stop(); HConnectionManager.deleteConnection(UTIL.getConfiguration(), true); diff --git src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java index e34d8bc..8af0f91 100644 --- src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java +++ src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionImplementation; import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionKey; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; import org.mockito.Mockito; /** @@ -92,7 +93,8 @@ public class HConnectionTestingUtility { * @throws IOException */ public static HConnection getMockedConnectionAndDecorate(final Configuration conf, - final HRegionInterface implementation, final ServerName sn, final HRegionInfo hri) + final HRegionInterface implementation, final ClientProtocol client, + final ServerName sn, 
final HRegionInfo hri) throws IOException { HConnection c = HConnectionTestingUtility.getMockedConnection(conf); Mockito.doNothing().when(c).close(); @@ -108,6 +110,11 @@ public class HConnectionTestingUtility { Mockito.when(c.getHRegionConnection(Mockito.anyString(), Mockito.anyInt())). thenReturn(implementation); } + if (client != null) { + // If a call to getClient, return this client. + Mockito.when(c.getClient(Mockito.anyString(), Mockito.anyInt())). + thenReturn(client); + } return c; } diff --git src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java index f2f8ee3..d8e3ee1 100644 --- src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java +++ src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java @@ -99,6 +99,7 @@ import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; import com.google.protobuf.Message; +import com.google.protobuf.RpcController; @Category(SmallTests.class) public class TestHbaseObjectWritable extends TestCase { @@ -523,6 +524,7 @@ public class TestHbaseObjectWritable extends TestCase { assertEquals(80,HbaseObjectWritable.getClassCode(Message.class).intValue()); assertEquals(81,HbaseObjectWritable.getClassCode(Array.class).intValue()); + assertEquals(82,HbaseObjectWritable.getClassCode(RpcController.class).intValue()); } /** @@ -531,7 +533,7 @@ public class TestHbaseObjectWritable extends TestCase { * note on the test above. 
*/ public void testGetNextObjectCode(){ - assertEquals(82,HbaseObjectWritable.getNextClassCode()); + assertEquals(83,HbaseObjectWritable.getNextClassCode()); } @org.junit.Rule diff --git src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 41616c8..a59e152 100644 --- src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -50,6 +50,23 @@ import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary; import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.ipc.ProtocolSignature; import org.apache.hadoop.hbase.ipc.RpcServer; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.LockRowResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowRequest; +import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse; import org.apache.hadoop.hbase.regionserver.CompactionRequestor; import org.apache.hadoop.hbase.regionserver.FlushRequester; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -64,6 +81,9 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * A mock RegionServer implementation. * Use this when you can't bend Mockito to your liking (e.g. return null result @@ -72,7 +92,7 @@ import org.apache.zookeeper.KeeperException; * {@link #setGetResult(byte[], byte[], Result)} for how to fill the backing data * store that the get pulls from. */ -class MockRegionServer implements HRegionInterface, RegionServerServices { +class MockRegionServer implements HRegionInterface, ClientProtocol, RegionServerServices { private final ServerName sn; private final ZooKeeperWatcher zkw; private final Configuration conf; @@ -245,9 +265,8 @@ class MockRegionServer implements HRegionInterface, RegionServerServices { @Override public Result get(byte[] regionName, Get get) throws IOException { - Map m = this.gets.get(regionName); - if (m == null) return null; - return m.get(get.getRow()); + // TODO Auto-generated method stub + return null; } @Override @@ -597,4 +616,87 @@ class MockRegionServer implements HRegionInterface, RegionServerServices { public void mutateRow(byte[] regionName, RowMutations rm) throws IOException { // TODO Auto-generated method stub } + + @Override + public GetResponse get(RpcController controller, GetRequest request) + throws ServiceException { + byte[] regionName = request.getRegion().getValue().toByteArray(); + Map m = this.gets.get(regionName); + GetResponse.Builder builder = GetResponse.newBuilder(); + if (m != null) { + byte[] row = request.getGet().getRow().toByteArray(); + 
builder.setResult(ProtobufUtil.toResult(m.get(row))); + } + return builder.build(); + } + + @Override + public MutateResponse mutate(RpcController controller, MutateRequest request) + throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public ScanResponse scan(RpcController controller, ScanRequest request) + throws ServiceException { + ScanResponse.Builder builder = ScanResponse.newBuilder(); + try { + if (request.hasScan()) { + byte[] regionName = request.getRegion().getValue().toByteArray(); + builder.setScannerId(openScanner(regionName, null)); + builder.setMoreResults(true); + } + else { + long scannerId = request.getScannerId(); + Result result = next(scannerId); + if (result != null) { + builder.addResult(ProtobufUtil.toResult(result)); + builder.setMoreResults(true); + } + else { + builder.setMoreResults(false); + close(scannerId); + } + } + } catch (IOException ie) { + throw new ServiceException(ie); + } + return builder.build(); + } + + @Override + public LockRowResponse lockRow(RpcController controller, + LockRowRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public UnlockRowResponse unlockRow(RpcController controller, + UnlockRowRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public BulkLoadHFileResponse bulkLoadHFile(RpcController controller, + BulkLoadHFileRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public ExecCoprocessorResponse execCoprocessor(RpcController controller, + ExecCoprocessorRequest request) throws ServiceException { + // TODO Auto-generated method stub + return null; + } + + @Override + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi( + RpcController controller, MultiRequest request) throws ServiceException { + // TODO Auto-generated method stub + return 
null; + } } \ No newline at end of file diff --git src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java index 6ed4ba2..b84a115 100644 --- src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java +++ src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java @@ -31,23 +31,25 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HServerLoad; -import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.catalog.CatalogTracker; -import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.executor.EventHandler.EventType; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType; import org.apache.hadoop.hbase.executor.RegionTransitionData; -import org.apache.hadoop.hbase.ipc.HRegionInterface; import org.apache.hadoop.hbase.master.handler.ServerShutdownHandler; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.RegionOpeningState; import 
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; @@ -65,6 +67,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + /** * Test {@link AssignmentManager} @@ -151,7 +156,7 @@ public class TestAssignmentManager { */ @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithOpenedNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -194,7 +199,7 @@ public class TestAssignmentManager { @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithClosedNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -238,7 +243,7 @@ public class TestAssignmentManager { @Test(timeout = 5000) public void testBalanceOnMasterFailoverScenarioWithOfflineNode() - throws IOException, KeeperException, InterruptedException { + throws IOException, KeeperException, InterruptedException, ServiceException { AssignmentManagerWithExtrasForTesting am = setUpMockedAssignmentManager(this.server, this.serverManager); try { @@ -363,7 +368,8 @@ public class TestAssignmentManager { * @throws IOException */ @Test - public void testShutdownHandler() throws KeeperException, IOException { + public void testShutdownHandler() + throws KeeperException, IOException, ServiceException { // Create and startup an executor. This is used by AssignmentManager // handling zk callbacks. 
ExecutorService executor = startupMasterExecutor("testShutdownHandler"); @@ -380,19 +386,20 @@ public class TestAssignmentManager { // Need to set up a fake scan of meta for the servershutdown handler // Make an RS Interface implementation. Make it so a scanner can go against it. - HRegionInterface implementation = Mockito.mock(HRegionInterface.class); + ClientProtocol implementation = Mockito.mock(ClientProtocol.class); // Get a meta row result that has region up on SERVERNAME_A Result r = Mocking.getMetaTableRowResult(REGIONINFO, SERVERNAME_A); - Mockito.when(implementation.openScanner((byte [])Mockito.any(), (Scan)Mockito.any())). - thenReturn(System.currentTimeMillis()); - // Return a good result first and then return null to indicate end of scan - Mockito.when(implementation.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(new Result [] {r}, (Result [])null); + ScanResponse.Builder builder = ScanResponse.newBuilder(); + builder.setMoreResults(false); + builder.addResult(ProtobufUtil.toResult(r)); + Mockito.when(implementation.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenReturn(builder.build()); // Get a connection w/ mocked up common methods. HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(HTU.getConfiguration(), - implementation, SERVERNAME_B, REGIONINFO); + null, implementation, SERVERNAME_B, REGIONINFO); // Make it so we can get a catalogtracker from servermanager.. .needed // down in guts of server shutdown handler. @@ -531,7 +538,7 @@ public class TestAssignmentManager { */ private AssignmentManagerWithExtrasForTesting setUpMockedAssignmentManager(final Server server, final ServerManager manager) - throws IOException, KeeperException { + throws IOException, KeeperException, ServiceException { // We need a mocked catalog tracker. Its used by our AM instance. CatalogTracker ct = Mockito.mock(CatalogTracker.class); // Make an RS Interface implementation. 
Make it so a scanner can go against @@ -539,21 +546,24 @@ public class TestAssignmentManager { // messing with. Needed when "new master" joins cluster. AM will try and // rebuild its list of user regions and it will also get the HRI that goes // with an encoded name by doing a Get on .META. - HRegionInterface ri = Mockito.mock(HRegionInterface.class); + ClientProtocol ri = Mockito.mock(ClientProtocol.class); // Get a meta row result that has region up on SERVERNAME_A for REGIONINFO Result r = Mocking.getMetaTableRowResult(REGIONINFO, SERVERNAME_A); - Mockito.when(ri .openScanner((byte[]) Mockito.any(), (Scan) Mockito.any())). - thenReturn(System.currentTimeMillis()); - // Return good result 'r' first and then return null to indicate end of scan - Mockito.when(ri.next(Mockito.anyLong(), Mockito.anyInt())). - thenReturn(new Result[] { r }, (Result[]) null); + ScanResponse.Builder builder = ScanResponse.newBuilder(); + builder.setMoreResults(false); + builder.addResult(ProtobufUtil.toResult(r)); + Mockito.when(ri.scan( + (RpcController)Mockito.any(), (ScanRequest)Mockito.any())). + thenReturn(builder.build()); // If a get, return the above result too for REGIONINFO - Mockito.when(ri.get((byte[]) Mockito.any(), (Get) Mockito.any())). - thenReturn(r); + GetResponse.Builder getBuilder = GetResponse.newBuilder(); + getBuilder.setResult(ProtobufUtil.toResult(r)); + Mockito.when(ri.get((RpcController)Mockito.any(), (GetRequest) Mockito.any())). + thenReturn(getBuilder.build()); // Get a connection w/ mocked up common methods. HConnection connection = HConnectionTestingUtility. - getMockedConnectionAndDecorate(HTU.getConfiguration(), ri, SERVERNAME_B, - REGIONINFO); + getMockedConnectionAndDecorate(HTU.getConfiguration(), null, + ri, SERVERNAME_B, REGIONINFO); // Make it so we can get the connection from our mocked catalogtracker Mockito.when(ct.getConnection()).thenReturn(connection); // Create and startup an executor. Used by AM handling zk callbacks. 
diff --git src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index b4dcb83..cedf31e 100644 --- src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -54,6 +54,10 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.executor.ExecutorService; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; @@ -62,6 +66,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; + @Category(SmallTests.class) public class TestCatalogJanitor { /** @@ -76,12 +83,22 @@ public class TestCatalogJanitor { MockServer(final HBaseTestingUtility htu) throws NotAllMetaRegionsOnlineException, IOException, InterruptedException { this.c = htu.getConfiguration(); + ClientProtocol ri = Mockito.mock(ClientProtocol.class); + MutateResponse.Builder builder = MutateResponse.newBuilder(); + builder.setProcessed(true); + try { + Mockito.when(ri.mutate( + (RpcController)Mockito.any(), (MutateRequest)Mockito.any())). + thenReturn(builder.build()); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); + } // Mock an HConnection and a HRegionInterface implementation. Have the // HConnection return the HRI. Have the HRI return a few mocked up responses // to make our test work. 
this.connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(this.c, - Mockito.mock(HRegionInterface.class), + Mockito.mock(HRegionInterface.class), ri, new ServerName("example.org,12345,6789"), HRegionInfo.FIRST_META_REGIONINFO); // Set hbase.rootdir into test dir. diff --git src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java index ceca6f5..41b339c 100644 --- src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java +++ src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java @@ -194,7 +194,7 @@ public class TestMasterNoCluster { // associate so the below mocking of a connection will fail. HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(), - rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); + rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout); } }; @@ -271,7 +271,7 @@ public class TestMasterNoCluster { // of a connection will fail. 
HConnection connection = HConnectionTestingUtility.getMockedConnectionAndDecorate(TESTUTIL.getConfiguration(), - rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); + rs0, rs0, rs0.getServerName(), HRegionInfo.ROOT_REGIONINFO); return new CatalogTracker(zk, conf, connection, abortable, defaultTimeout); } }; diff --git src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java index cac2989..fa177ae 100644 --- src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java +++ src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java @@ -25,6 +25,11 @@ import java.util.List; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; + +import com.google.protobuf.ServiceException; /** * A region server that will OOME. @@ -42,10 +47,16 @@ public class OOMERegionServer extends HRegionServer { public void put(byte [] regionName, Put put) throws IOException { - super.put(regionName, put); - for (int i = 0; i < 30; i++) { - // Add the batch update 30 times to bring on the OOME faster. - this.retainer.add(put); + try { + MutateRequest request = + RequestConverter.buildMutateRequest(regionName, put); + super.mutate(null, request); + for (int i = 0; i < 30; i++) { + // Add the batch update 30 times to bring on the OOME faster. 
+ this.retainer.add(put); + } + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); } } diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index a1bf73b..d0cad45 100644 --- src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -39,6 +39,9 @@ import org.apache.hadoop.hbase.client.ServerCallable; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.ipc.HRegionInterface; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.junit.Test; @@ -145,7 +148,9 @@ public class TestHRegionServerBulkLoad { LOG.debug("Going to connect to server " + location + " for row " + Bytes.toStringBinary(row)); byte[] regionName = location.getRegionInfo().getRegionName(); - server.bulkLoadHFiles(famPaths, regionName); + BulkLoadHFileRequest request = + RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName); + server.bulkLoadHFile(null, request); return null; } }.withRetries(); @@ -159,6 +164,8 @@ public class TestHRegionServerBulkLoad { public Void call() throws Exception { LOG.debug("compacting " + location + " for row " + Bytes.toStringBinary(row)); + HRegionInterface server = connection.getHRegionConnection( + location.getHostname(), location.getPort()); server.compactRegion(location.getRegionInfo(), true); numCompactions.incrementAndGet(); return null;