diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index a5dc7fb..4fa20e6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -444,14 +444,22 @@ public class MetricsConnection implements StatisticTrackable {
         // use generic implementation
         break;
       case 4:
-        assert "ExecService".equals(method.getName());
+        assert "PrepareBulkLoad".equals(method.getName());
         // use generic implementation
         break;
       case 5:
-        assert "ExecRegionServerService".equals(method.getName());
+        assert "CleanupBulkLoad".equals(method.getName());
         // use generic implementation
         break;
       case 6:
+        assert "ExecService".equals(method.getName());
+        // use generic implementation
+        break;
+      case 7:
+        assert "ExecRegionServerService".equals(method.getName());
+        // use generic implementation
+        break;
+      case 8:
         assert "Multi".equals(method.getName());
         multiTracker.updateRpc(stats);
         return;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
new file mode 100644
index 0000000..7b1547d
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.security.token.Token;
+
+import com.google.protobuf.ServiceException;
+
+/**
+ * Client proxy for SecureBulkLoadProtocol
+ */
+@InterfaceAudience.Private
+public class SecureBulkLoadClient {
+  private Table table;
+
+  public SecureBulkLoadClient(Table table) {
+    this.table = table;
+  }
+
+  public String prepareBulkLoad(final Connection conn) throws IOException {
+    try {
+      RegionServerCallable<String> callable =
+          new RegionServerCallable<String>(conn, table.getName(), HConstants.EMPTY_START_ROW) {
+            @Override
+            public String call(int callTimeout) throws IOException {
+              byte[] regionName = getLocation().getRegionInfo().getRegionName();
+              RegionSpecifier region =
+                  RequestConverter
+                      .buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName);
+              try {
+                PrepareBulkLoadRequest request =
+                    PrepareBulkLoadRequest.newBuilder()
+                        .setTableName(ProtobufUtil.toProtoTableName(table.getName()))
+                        .setRegion(region).build();
+                PrepareBulkLoadResponse response = getStub().prepareBulkLoad(null, request);
+                return response.getBulkToken();
+              } catch (ServiceException se) {
+                throw ProtobufUtil.getRemoteException(se);
+              }
+            }
+          };
+      return RpcRetryingCallerFactory.instantiate(conn.getConfiguration(), null)
+          .<String> newCaller().callWithRetries(callable, Integer.MAX_VALUE);
+    } catch (Throwable throwable) {
+      throw new IOException(throwable);
+    }
+  }
+
+  public void cleanupBulkLoad(final Connection conn, final String bulkToken) throws IOException {
+    try {
+      RegionServerCallable<Void> callable =
+          new RegionServerCallable<Void>(conn, table.getName(), HConstants.EMPTY_START_ROW) {
+            @Override
+            public Void call(int callTimeout) throws IOException {
+              byte[] regionName = getLocation().getRegionInfo().getRegionName();
+              RegionSpecifier region = RequestConverter.buildRegionSpecifier(
+                  RegionSpecifierType.REGION_NAME, regionName);
+              try {
+                CleanupBulkLoadRequest request =
+                    CleanupBulkLoadRequest.newBuilder().setRegion(region)
+                        .setBulkToken(bulkToken).build();
+                getStub().cleanupBulkLoad(null, request);
+              } catch (ServiceException se) {
+                throw ProtobufUtil.getRemoteException(se);
+              }
+              return null;
+            }
+          };
+      RpcRetryingCallerFactory.instantiate(conn.getConfiguration(), null)
+          .<Void> newCaller().callWithRetries(callable, Integer.MAX_VALUE);
+    } catch (Throwable throwable) {
+      throw new IOException(throwable);
+    }
+  }
+
+  /**
+   * Securely bulk load a list of HFiles using client protocol.
+   *
+   * @param client client service stub for the region server hosting the region
+   * @param familyPaths column family to HFile path pairs
+   * @param regionName region to load the HFiles into
+   * @param assignSeqNum whether a sequence number should be assigned to the loaded files
+   * @param userToken filesystem delegation token
+   * @param bulkToken bulk load token returned by {@link #prepareBulkLoad(Connection)}
+   * @return true if all are loaded
+   * @throws IOException
+   */
+  public boolean secureBulkLoadHFiles(final ClientService.BlockingInterface client,
+      final List<Pair<byte[], String>> familyPaths,
+      final byte[] regionName, boolean assignSeqNum,
+      final Token<?> userToken, final String bulkToken) throws IOException {
+    BulkLoadHFileRequest request =
+        RequestConverter.buildBulkLoadHFileRequest(familyPaths, regionName, assignSeqNum,
+          userToken, bulkToken);
+
+    try {
+      BulkLoadHFileResponse response = client.bulkLoadHFile(null, request);
+      return response.getLoaded();
+    } catch (ServiceException se) {
+      throw ProtobufUtil.getRemoteException(se);
+    }
+  }
+
+  public Path getStagingPath(String bulkToken, byte[] family) throws IOException {
+    return SecureBulkLoadUtil.getStagingPath(table.getConfiguration(), bulkToken, family);
+  }
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
deleted file mode 100644
index c27322a..0000000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package org.apache.hadoop.hbase.client.coprocessor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; -import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -import org.apache.hadoop.hbase.ipc.ServerRpcController; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos; -import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; -import org.apache.hadoop.hbase.util.ByteStringer; -import org.apache.hadoop.hbase.util.Pair; -import org.apache.hadoop.security.token.Token; - -/** - * Client proxy for SecureBulkLoadProtocol - * used in conjunction with SecureBulkLoadEndpoint - */ -@InterfaceAudience.Private -public class SecureBulkLoadClient { - private Table table; - - public SecureBulkLoadClient(Table table) { - this.table = table; - } - - public String prepareBulkLoad(final TableName tableName) throws IOException { - try { - CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW); - SecureBulkLoadProtos.SecureBulkLoadService instance = - ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); - - ServerRpcController controller = new ServerRpcController(); - - BlockingRpcCallback rpcCallback = - new BlockingRpcCallback(); - - SecureBulkLoadProtos.PrepareBulkLoadRequest request = - SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder() - .setTableName(ProtobufUtil.toProtoTableName(tableName)).build(); - - instance.prepareBulkLoad(controller, - request, - rpcCallback); - - SecureBulkLoadProtos.PrepareBulkLoadResponse response = rpcCallback.get(); - if (controller.failedOnException()) { - throw controller.getFailedOn(); - } - - return response.getBulkToken(); - } catch (Throwable throwable) { - throw new IOException(throwable); - } - } - - public void cleanupBulkLoad(final String bulkToken) throws IOException { - try { - CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW); - SecureBulkLoadProtos.SecureBulkLoadService instance = - ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); - - ServerRpcController controller = new ServerRpcController(); - - BlockingRpcCallback rpcCallback = - new BlockingRpcCallback(); - - SecureBulkLoadProtos.CleanupBulkLoadRequest request = - SecureBulkLoadProtos.CleanupBulkLoadRequest.newBuilder() - .setBulkToken(bulkToken).build(); - - instance.cleanupBulkLoad(controller, - request, - rpcCallback); - - if (controller.failedOnException()) { - throw controller.getFailedOn(); - } - } catch (Throwable throwable) { - throw new IOException(throwable); - } - } - - public boolean bulkLoadHFiles(final List> familyPaths, - final Token userToken, - final String bulkToken, - final byte[] startRow) throws IOException { - // we never want to send a batch of HFiles to all regions, thus cannot call - // HTable#coprocessorService methods that take start and end rowkeys; see HBASE-9639 - try { - CoprocessorRpcChannel channel = table.coprocessorService(startRow); - SecureBulkLoadProtos.SecureBulkLoadService instance = - 
ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); - - SecureBulkLoadProtos.DelegationToken protoDT = - SecureBulkLoadProtos.DelegationToken.newBuilder().build(); - if(userToken != null) { - protoDT = - SecureBulkLoadProtos.DelegationToken.newBuilder() - .setIdentifier(ByteStringer.wrap(userToken.getIdentifier())) - .setPassword(ByteStringer.wrap(userToken.getPassword())) - .setKind(userToken.getKind().toString()) - .setService(userToken.getService().toString()).build(); - } - - List protoFamilyPaths = - new ArrayList(); - for(Pair el: familyPaths) { - protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() - .setFamily(ByteStringer.wrap(el.getFirst())) - .setPath(el.getSecond()).build()); - } - - SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request = - SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder() - .setFsToken(protoDT) - .addAllFamilyPath(protoFamilyPaths) - .setBulkToken(bulkToken).build(); - - ServerRpcController controller = new ServerRpcController(); - BlockingRpcCallback rpcCallback = - new BlockingRpcCallback(); - instance.secureBulkLoadHFiles(controller, - request, - rpcCallback); - - SecureBulkLoadProtos.SecureBulkLoadHFilesResponse response = rpcCallback.get(); - if (controller.failedOnException()) { - throw controller.getFailedOn(); - } - return response.getLoaded(); - } catch (Throwable throwable) { - throw new IOException(throwable); - } - } - - public Path getStagingPath(String bulkToken, byte[] family) throws IOException { - return SecureBulkLoadUtil.getStagingPath(table.getConfiguration(), bulkToken, family); - } -} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index fecc3c2..716dfd3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -106,8 +106,6 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionReques import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall; @@ -1576,30 +1574,6 @@ public final class ProtobufUtil { // Start helpers for Client - /** - * A helper to bulk load a list of HFiles using client protocol. 
- * - * @param client - * @param familyPaths - * @param regionName - * @param assignSeqNum - * @return true if all are loaded - * @throws IOException - */ - public static boolean bulkLoadHFile(final ClientService.BlockingInterface client, - final List> familyPaths, - final byte[] regionName, boolean assignSeqNum) throws IOException { - BulkLoadHFileRequest request = - RequestConverter.buildBulkLoadHFileRequest(familyPaths, regionName, assignSeqNum); - try { - BulkLoadHFileResponse response = - client.bulkLoadHFile(null, request); - return response.getLoaded(); - } catch (ServiceException se) { - throw getRemoteException(se); - } - } - public static CoprocessorServiceResponse execService(final RpcController controller, final ClientService.BlockingInterface client, final CoprocessorServiceCall call, final byte[] regionName) throws IOException { @@ -1616,8 +1590,8 @@ public final class ProtobufUtil { } public static CoprocessorServiceResponse execService(final RpcController controller, - final MasterService.BlockingInterface client, final CoprocessorServiceCall call) - throws IOException { + final MasterService.BlockingInterface client, final CoprocessorServiceCall call) + throws IOException { CoprocessorServiceRequest request = CoprocessorServiceRequest.newBuilder() .setCall(call).setRegion( RequestConverter.buildRegionSpecifier(REGION_NAME, HConstants.EMPTY_BYTE_ARRAY)).build(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index ce01e1e..1ddfb04 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.protobuf; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; @@ -64,7 +65,6 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodes import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; @@ -103,8 +103,8 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableReques import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest; -import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ReleaseSplitOrMergeLockAndRollbackRequest; +import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest; import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest; @@ -115,6 +115,7 @@ import 
org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.security.token.Token;
 
 import com.google.protobuf.ByteString;
 
@@ -526,19 +527,41 @@
    */
   public static BulkLoadHFileRequest buildBulkLoadHFileRequest(
       final List<Pair<byte[], String>> familyPaths,
-      final byte[] regionName, boolean assignSeqNum) {
-    BulkLoadHFileRequest.Builder builder = BulkLoadHFileRequest.newBuilder();
-    RegionSpecifier region = buildRegionSpecifier(
+      final byte[] regionName, boolean assignSeqNum,
+      final Token<?> userToken, final String bulkToken) {
+    RegionSpecifier region = RequestConverter.buildRegionSpecifier(
       RegionSpecifierType.REGION_NAME, regionName);
-    builder.setRegion(region);
-    FamilyPath.Builder familyPathBuilder = FamilyPath.newBuilder();
-    for (Pair<byte[], String> familyPath: familyPaths) {
-      familyPathBuilder.setFamily(ByteStringer.wrap(familyPath.getFirst()));
-      familyPathBuilder.setPath(familyPath.getSecond());
-      builder.addFamilyPath(familyPathBuilder.build());
+
+    ClientProtos.DelegationToken protoDT = null;
+    if (userToken != null) {
+      protoDT =
+          ClientProtos.DelegationToken.newBuilder()
+            .setIdentifier(ByteStringer.wrap(userToken.getIdentifier()))
+            .setPassword(ByteStringer.wrap(userToken.getPassword()))
+            .setKind(userToken.getKind().toString())
+            .setService(userToken.getService().toString()).build();
     }
-    builder.setAssignSeqNum(assignSeqNum);
-    return builder.build();
+
+    List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths =
+        new ArrayList<ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPaths.size());
+    for(Pair<byte[], String> el: familyPaths) {
+      protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
+        .setFamily(ByteStringer.wrap(el.getFirst()))
+        .setPath(el.getSecond()).build());
+    }
+
+    BulkLoadHFileRequest.Builder request =
+        ClientProtos.BulkLoadHFileRequest.newBuilder()
+          .setRegion(region)
+          .setAssignSeqNum(assignSeqNum)
+          .addAllFamilyPath(protoFamilyPaths);
+    if (userToken != null) {
+      request.setFsToken(protoDT);
+    }
+    if (bulkToken != null) {
+      request.setBulkToken(bulkToken);
+    }
+    return request.build();
   }
 
   /**
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index e8135a8..1ece448 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -53,6 +53,8 @@ import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
@@ -63,6 +65,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException; @@ -478,6 +482,18 @@ public class TestClientNoCluster extends Configured implements Tool { CoprocessorServiceRequest request) throws ServiceException { throw new NotImplementedException(); } + + @Override + public PrepareBulkLoadResponse prepareBulkLoad(RpcController controller, + PrepareBulkLoadRequest request) throws ServiceException { + throw new NotImplementedException(); + } + + @Override + public CleanupBulkLoadResponse cleanupBulkLoad(RpcController controller, + CleanupBulkLoadRequest request) throws ServiceException { + throw new NotImplementedException(); + } } static ScanResponse doMetaScanResponse(final SortedMap> meta, diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index 3cae4d2..4e7c8d2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -118,6 +118,7 @@ public class HBaseCommonTestingUtility { if (deleteOnExit()) this.dataTestDir.deleteOnExit(); createSubDir("hbase.local.dir", testPath, "hbase-local-dir"); + createSubDir("hbase.bulkload.staging.dir", testPath, "staging"); return testPath; } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java index 4deab19..d7b8461 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -21048,6 +21048,35 @@ public final class ClientProtos { * optional bool assign_seq_num = 3; */ boolean getAssignSeqNum(); + + // optional .hbase.pb.DelegationToken fs_token = 4; + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + boolean hasFsToken(); + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken(); + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); + + // optional string bulk_token = 5; + /** + * optional string bulk_token = 5; + */ + boolean hasBulkToken(); + /** + * optional string bulk_token = 5; + */ + java.lang.String getBulkToken(); + /** + * optional string bulk_token = 5; + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); } /** * Protobuf type {@code hbase.pb.BulkLoadHFileRequest} @@ -21132,6 +21161,24 @@ public final class ClientProtos { assignSeqNum_ = input.readBool(); break; } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = fsToken_.toBuilder(); + } + fsToken_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(fsToken_); + fsToken_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + case 42: { + bitField0_ |= 0x00000008; + bulkToken_ = input.readBytes(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -21867,10 +21914,77 @@ public final class ClientProtos { return assignSeqNum_; } + // optional .hbase.pb.DelegationToken fs_token = 4; + public static final int FS_TOKEN_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_; + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public boolean hasFsToken() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { + return fsToken_; + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + return fsToken_; + } + + // optional string bulk_token = 5; + public static final int BULK_TOKEN_FIELD_NUMBER = 5; + private java.lang.Object bulkToken_; + /** + * optional string bulk_token = 5; + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string bulk_token = 5; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } + return s; + } + } + /** + * optional string bulk_token = 5; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private void initFields() { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); familyPath_ = java.util.Collections.emptyList(); assignSeqNum_ = false; + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + bulkToken_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -21907,6 +22021,12 @@ public final class ClientProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBool(3, assignSeqNum_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(4, fsToken_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(5, getBulkTokenBytes()); + } getUnknownFields().writeTo(output); } @@ -21928,6 +22048,14 @@ public final class ClientProtos { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, assignSeqNum_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, fsToken_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getBulkTokenBytes()); + } size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -21963,6 +22091,16 @@ public final class ClientProtos { result = result && (getAssignSeqNum() == other.getAssignSeqNum()); } + result = result && (hasFsToken() == other.hasFsToken()); + if (hasFsToken()) { + result = result && getFsToken() + .equals(other.getFsToken()); + } + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -21988,6 +22126,14 @@ public final class ClientProtos { hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER; hash = (53 * hash) + hashBoolean(getAssignSeqNum()); } + if (hasFsToken()) { + hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getFsToken().hashCode(); + } + if (hasBulkToken()) { + hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -22097,6 +22243,7 @@ public final class ClientProtos { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRegionFieldBuilder(); getFamilyPathFieldBuilder(); + getFsTokenFieldBuilder(); } } private static Builder create() { @@ -22119,6 +22266,14 @@ public final class ClientProtos { } assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000004); + if (fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -22168,6 +22323,18 @@ public final class ClientProtos { to_bitField0_ |= 0x00000002; } result.assignSeqNum_ = assignSeqNum_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + if (fsTokenBuilder_ == null) { + result.fsToken_ = fsToken_; + } else { + result.fsToken_ = fsTokenBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000008; + } + result.bulkToken_ = bulkToken_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -22216,6 +22383,14 @@ public final class ClientProtos { if (other.hasAssignSeqNum()) { setAssignSeqNum(other.getAssignSeqNum()); } + if (other.hasFsToken()) { + mergeFsToken(other.getFsToken()); + } + if (other.hasBulkToken()) { + bitField0_ |= 0x00000010; + bulkToken_ = other.bulkToken_; + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -22647,6 +22822,197 @@ public final class ClientProtos { return this; } + // optional .hbase.pb.DelegationToken fs_token = 4; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public boolean hasFsToken() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { + if (fsTokenBuilder_ == null) { + return fsToken_; + } else { + return fsTokenBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { + if (fsTokenBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + fsToken_ = value; + onChanged(); + } else { + fsTokenBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public Builder setFsToken( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) { + if (fsTokenBuilder_ == null) { + fsToken_ = builderForValue.build(); + onChanged(); + } else { + fsTokenBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { + if (fsTokenBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { + fsToken_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); + } else { + fsToken_ = value; + } + onChanged(); + } else { + fsTokenBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public Builder clearFsToken() { + if (fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + onChanged(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getFsTokenFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + if (fsTokenBuilder_ != null) { + return fsTokenBuilder_.getMessageOrBuilder(); + } else { + return fsToken_; + } + } + /** + * optional .hbase.pb.DelegationToken fs_token = 4; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> + getFsTokenFieldBuilder() { + if (fsTokenBuilder_ == null) { + fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( + fsToken_, + getParentForChildren(), + isClean()); + fsToken_ = null; + } + return fsTokenBuilder_; + } + + // optional string bulk_token = 5; + private java.lang.Object bulkToken_ = ""; + /** + * optional string bulk_token = 5; + */ + public boolean 
hasBulkToken() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional string bulk_token = 5; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string bulk_token = 5; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string bulk_token = 5; + */ + public Builder setBulkToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + bulkToken_ = value; + onChanged(); + return this; + } + /** + * optional string bulk_token = 5; + */ + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000010); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + /** + * optional string bulk_token = 5; + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + bulkToken_ = value; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileRequest) } @@ -22784,17 +23150,3302 @@ public final class ClientProtos { } private void initFields() { - loaded_ = false; + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( 
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.BulkLoadHFileResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + if (other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + /** + * required bool loaded = 1; + */ + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool loaded = 1; + */ + public boolean getLoaded() { + return loaded_; + } + /** + * required bool loaded = 1; + */ + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + /** + * required bool loaded = 1; + */ + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse) + } + + static { + defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse) + } + + public interface DelegationTokenOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes identifier = 1; + /** + * optional bytes identifier = 1; + */ + boolean hasIdentifier(); + /** + * optional bytes identifier = 1; + */ + com.google.protobuf.ByteString getIdentifier(); + + // optional bytes password = 2; + /** + * optional bytes password = 2; + */ + boolean hasPassword(); + /** + * optional bytes password = 2; + */ + com.google.protobuf.ByteString getPassword(); + + // optional string kind = 3; + /** + * optional string kind = 3; + */ + boolean hasKind(); + /** + * optional string kind = 3; + */ + java.lang.String getKind(); + /** + * optional string kind = 3; + */ + com.google.protobuf.ByteString + getKindBytes(); + + // optional string service = 4; + /** + * optional string service = 4; + */ + boolean hasService(); + /** + * optional string service = 4; + */ + java.lang.String getService(); + /** + * optional string service = 4; + */ + com.google.protobuf.ByteString + getServiceBytes(); + } + /** + * Protobuf type {@code hbase.pb.DelegationToken} + */ + public static final class DelegationToken extends + com.google.protobuf.GeneratedMessage + implements DelegationTokenOrBuilder { + // Use DelegationToken.newBuilder() to construct. 
+ private DelegationToken(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DelegationToken defaultInstance; + public static DelegationToken getDefaultInstance() { + return defaultInstance; + } + + public DelegationToken getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DelegationToken( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + identifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + password_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + kind_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + service_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DelegationToken parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DelegationToken(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional bytes identifier = 1; + public static final int IDENTIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString identifier_; + /** + * optional bytes identifier = 1; + */ + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bytes identifier = 1; + */ + public com.google.protobuf.ByteString 
getIdentifier() { + return identifier_; + } + + // optional bytes password = 2; + public static final int PASSWORD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString password_; + /** + * optional bytes password = 2; + */ + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes password = 2; + */ + public com.google.protobuf.ByteString getPassword() { + return password_; + } + + // optional string kind = 3; + public static final int KIND_FIELD_NUMBER = 3; + private java.lang.Object kind_; + /** + * optional string kind = 3; + */ + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string kind = 3; + */ + public java.lang.String getKind() { + java.lang.Object ref = kind_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + kind_ = s; + } + return s; + } + } + /** + * optional string kind = 3; + */ + public com.google.protobuf.ByteString + getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string service = 4; + public static final int SERVICE_FIELD_NUMBER = 4; + private java.lang.Object service_; + /** + * optional string service = 4; + */ + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string service = 4; + */ + public java.lang.String getService() { + java.lang.Object ref = service_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + service_ = s; + } + return s; + } + } + /** + * optional string service = 4; + */ + public com.google.protobuf.ByteString + getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + service_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + identifier_ = com.google.protobuf.ByteString.EMPTY; + password_ = com.google.protobuf.ByteString.EMPTY; + kind_ = ""; + service_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getServiceBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != 
-1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getServiceBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) obj; + + boolean result = true; + result = result && (hasIdentifier() == other.hasIdentifier()); + if (hasIdentifier()) { + result = result && getIdentifier() + .equals(other.getIdentifier()); + } + result = result && (hasPassword() == other.hasPassword()); + if (hasPassword()) { + result = result && getPassword() + .equals(other.getPassword()); + } + result = result && (hasKind() == other.hasKind()); + if (hasKind()) { + result = result && getKind() + .equals(other.getKind()); + } + result = result && (hasService() == other.hasService()); + if (hasService()) { + result = result && getService() + .equals(other.getService()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasIdentifier()) { + hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; + hash = (53 * hash) + getIdentifier().hashCode(); + } + if (hasPassword()) { + hash = (37 * hash) + PASSWORD_FIELD_NUMBER; + hash = (53 * hash) + getPassword().hashCode(); + } + if (hasKind()) { + hash = (37 * hash) + KIND_FIELD_NUMBER; + hash = (53 * hash) + getKind().hashCode(); + } + if (hasService()) { + hash = (37 * hash) + SERVICE_FIELD_NUMBER; + hash = (53 * hash) + getService().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.DelegationToken} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + identifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + password_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + kind_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + service_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_DelegationToken_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.identifier_ = identifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.password_ = password_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.kind_ = kind_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.service_ = service_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) return this; + if (other.hasIdentifier()) { + setIdentifier(other.getIdentifier()); + } + if (other.hasPassword()) { + setPassword(other.getPassword()); + } + if (other.hasKind()) { + bitField0_ |= 0x00000004; + kind_ = other.kind_; + onChanged(); + } + if (other.hasService()) { + bitField0_ |= 0x00000008; + service_ = other.service_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional bytes identifier = 1; + private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes identifier = 1; + */ + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional bytes identifier = 1; + */ + public com.google.protobuf.ByteString getIdentifier() { + return identifier_; + } + /** + * optional bytes identifier = 1; + */ + public Builder setIdentifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + identifier_ = value; + onChanged(); + return this; + } + /** + * optional bytes identifier = 1; + */ + public Builder clearIdentifier() { + bitField0_ = (bitField0_ & ~0x00000001); + identifier_ = getDefaultInstance().getIdentifier(); + onChanged(); + return this; + } + + // optional bytes password = 2; + private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes password = 2; + */ + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes password = 2; + */ + public com.google.protobuf.ByteString getPassword() { + return password_; + } + /** + * optional bytes password = 2; + */ + public Builder setPassword(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + password_ = value; + onChanged(); + return this; + } + /** + * optional bytes password = 2; + */ + public Builder clearPassword() { + bitField0_ = (bitField0_ & ~0x00000002); + password_ = getDefaultInstance().getPassword(); + onChanged(); + return this; + } + + // optional string kind = 3; + private java.lang.Object kind_ = ""; + /** + * optional string kind = 3; + */ + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string kind = 3; + */ + public java.lang.String getKind() { + java.lang.Object ref = kind_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + kind_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string kind = 3; + */ + public com.google.protobuf.ByteString + getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string kind = 3; + */ + public Builder setKind( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + return this; + } + /** + * optional string kind = 3; + */ + public Builder clearKind() { + bitField0_ = (bitField0_ & ~0x00000004); + kind_ = getDefaultInstance().getKind(); + onChanged(); + return this; + } + /** + 
* optional string kind = 3; + */ + public Builder setKindBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + return this; + } + + // optional string service = 4; + private java.lang.Object service_ = ""; + /** + * optional string service = 4; + */ + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string service = 4; + */ + public java.lang.String getService() { + java.lang.Object ref = service_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + service_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string service = 4; + */ + public com.google.protobuf.ByteString + getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + service_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string service = 4; + */ + public Builder setService( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + return this; + } + /** + * optional string service = 4; + */ + public Builder clearService() { + bitField0_ = (bitField0_ & ~0x00000008); + service_ = getDefaultInstance().getService(); + onChanged(); + return this; + } + /** + * optional string service = 4; + */ + public Builder setServiceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken) + } + + static { + defaultInstance = new DelegationToken(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) + } + + public interface PrepareBulkLoadRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .hbase.pb.TableName table_name = 1; + /** + * required .hbase.pb.TableName table_name = 1; + */ + boolean hasTableName(); + /** + * required .hbase.pb.TableName table_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); + /** + * required .hbase.pb.TableName table_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); + + // optional .hbase.pb.RegionSpecifier region = 2; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + boolean hasRegion(); + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + /** + * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} + */ + public static final class PrepareBulkLoadRequest extends + com.google.protobuf.GeneratedMessage + implements PrepareBulkLoadRequestOrBuilder { + // Use PrepareBulkLoadRequest.newBuilder() to construct. 
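// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated code or of this patch): the
// hbase.pb.DelegationToken message defined above is assembled through its
// Builder like any other protobuf message. Field numbers and types are taken
// from the accessors in this class; the variable names and values below are
// placeholders only, not values mandated by HBase.
//
//   ClientProtos.DelegationToken fsToken =
//       ClientProtos.DelegationToken.newBuilder()
//           .setIdentifier(ByteString.copyFrom(identifierBytes)) // optional bytes identifier = 1
//           .setPassword(ByteString.copyFrom(passwordBytes))     // optional bytes password = 2
//           .setKind(tokenKind)                                  // optional string kind = 3
//           .setService(tokenService)                            // optional string service = 4
//           .build();
// ---------------------------------------------------------------------------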
+ private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PrepareBulkLoadRequest defaultInstance; + public static PrepareBulkLoadRequest getDefaultInstance() { + return defaultInstance; + } + + public PrepareBulkLoadRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrepareBulkLoadRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = tableName_.toBuilder(); + } + tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tableName_); + tableName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .hbase.pb.TableName table_name = 1; + public static final int TABLE_NAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; + /** + * required .hbase.pb.TableName table_name = 1; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { + return tableName_; + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + return tableName_; + } + + // optional .hbase.pb.RegionSpecifier region = 2; + public static final int REGION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getTableName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasRegion()) { + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTableNameFieldBuilder(); + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (tableNameBuilder_ == null) { + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + } else { + tableNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest 
getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (tableNameBuilder_ == null) { + result.tableName_ = tableName_; + } else { + result.tableName_ = tableNameBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + mergeTableName(other.getTableName()); + } + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!getTableName().isInitialized()) { + + return false; + } + if (hasRegion()) { + if (!getRegion().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .hbase.pb.TableName table_name = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; 
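// Illustrative note (not generated code): the Builder holds either the plain
// message field (tableName_) or a lazily created SingleFieldBuilder
// (tableNameBuilder_); getTableNameFieldBuilder() below switches to the
// builder form on first use and clears the plain field. A caller normally
// just sets the fields directly, e.g. (placeholder variables, shown only as
// a sketch under that assumption):
//
//   PrepareBulkLoadRequest request = PrepareBulkLoadRequest.newBuilder()
//       .setTableName(tableNameProto)   // required .hbase.pb.TableName table_name = 1
//       .setRegion(regionSpecifier)     // optional .hbase.pb.RegionSpecifier region = 2
//       .build();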
+ /** + * required .hbase.pb.TableName table_name = 1; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { + if (tableNameBuilder_ == null) { + return tableName_; + } else { + return tableNameBuilder_.getMessage(); + } + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableName_ = value; + onChanged(); + } else { + tableNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public Builder setTableName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { + if (tableNameBuilder_ == null) { + tableName_ = builderForValue.build(); + onChanged(); + } else { + tableNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { + tableName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); + } else { + tableName_ = value; + } + onChanged(); + } else { + tableNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public Builder clearTableName() { + if (tableNameBuilder_ == null) { + tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); + onChanged(); + } else { + tableNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTableNameFieldBuilder().getBuilder(); + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + if (tableNameBuilder_ != null) { + return tableNameBuilder_.getMessageOrBuilder(); + } else { + return tableName_; + } + } + /** + * required .hbase.pb.TableName table_name = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> + getTableNameFieldBuilder() { + if (tableNameBuilder_ == null) { + tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( + tableName_, + getParentForChildren(), + isClean()); + tableName_ = null; + } + return tableNameBuilder_; + 
} + + // optional .hbase.pb.RegionSpecifier region = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadRequest) + } + + static { + defaultInstance = new PrepareBulkLoadRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest) + } + + public interface PrepareBulkLoadResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string bulk_token = 1; + /** + * required string bulk_token = 1; + */ + boolean hasBulkToken(); + /** + * required string bulk_token = 1; + */ + java.lang.String getBulkToken(); + /** + * required string bulk_token = 1; + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); + } + /** + * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} + */ + public static final class PrepareBulkLoadResponse extends + com.google.protobuf.GeneratedMessage + implements PrepareBulkLoadResponseOrBuilder { + // Use PrepareBulkLoadResponse.newBuilder() to construct. + private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PrepareBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PrepareBulkLoadResponse defaultInstance; + public static PrepareBulkLoadResponse getDefaultInstance() { + return defaultInstance; + } + + public PrepareBulkLoadResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrepareBulkLoadResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrepareBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrepareBulkLoadResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string bulk_token = 1; + public static final int BULK_TOKEN_FIELD_NUMBER = 1; + private java.lang.Object bulkToken_; + /** + * required string bulk_token = 1; + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string bulk_token = 1; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } + return s; + } + } + /** + * required string bulk_token = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + bulkToken_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasBulkToken()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getBulkTokenBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getBulkTokenBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) obj; + + boolean result = true; + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBulkToken()) { + hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.bulkToken_ = 
bulkToken_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this; + if (other.hasBulkToken()) { + bitField0_ |= 0x00000001; + bulkToken_ = other.bulkToken_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasBulkToken()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string bulk_token = 1; + private java.lang.Object bulkToken_ = ""; + /** + * required string bulk_token = 1; + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string bulk_token = 1; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string bulk_token = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string bulk_token = 1; + */ + public Builder setBulkToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + /** + * required string bulk_token = 1; + */ + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000001); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + /** + * required string bulk_token = 1; + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadResponse) + } + + static { + defaultInstance = new PrepareBulkLoadResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse) + } + + public interface CleanupBulkLoadRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string bulk_token = 1; + /** + * required string bulk_token = 1; + */ + boolean hasBulkToken(); + /** + * required string bulk_token = 1; + */ + java.lang.String getBulkToken(); + /** + * required string bulk_token = 1; + */ + com.google.protobuf.ByteString + getBulkTokenBytes(); + + // optional .hbase.pb.RegionSpecifier region = 2; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + boolean hasRegion(); + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + /** + * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} + */ + public static final class CleanupBulkLoadRequest extends + com.google.protobuf.GeneratedMessage + implements CleanupBulkLoadRequestOrBuilder { + // Use CleanupBulkLoadRequest.newBuilder() to construct. + private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CleanupBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CleanupBulkLoadRequest defaultInstance; + public static CleanupBulkLoadRequest getDefaultInstance() { + return defaultInstance; + } + + public CleanupBulkLoadRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CleanupBulkLoadRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string bulk_token = 1; + public static final int BULK_TOKEN_FIELD_NUMBER = 1; + private java.lang.Object bulkToken_; + /** + * required string bulk_token = 1; + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string bulk_token = 1; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + bulkToken_ = s; + } + return s; + } + } + /** + * required string bulk_token = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional .hbase.pb.RegionSpecifier region = 2; + public static final int REGION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + bulkToken_ = ""; + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasBulkToken()) { + memoizedIsInitialized = 0; + return false; + } + if (hasRegion()) { + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream 
output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getBulkTokenBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getBulkTokenBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) obj; + + boolean result = true; + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBulkToken()) { + hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } 
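(Editorial aside, not part of the generated class.) The CleanupBulkLoadRequest message defined above is used like any other generated protobuf type: the bulk_token field is required, the region field is optional, and the static parseFrom overloads shown here round-trip the serialized bytes. A minimal sketch under those assumptions, with the helper class name and arguments purely illustrative:

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

    class CleanupBulkLoadRequestSketch {
      static CleanupBulkLoadRequest buildAndRoundTrip(String bulkToken, RegionSpecifier region)
          throws com.google.protobuf.InvalidProtocolBufferException {
        CleanupBulkLoadRequest request = CleanupBulkLoadRequest.newBuilder()
            .setBulkToken(bulkToken)   // required field; isInitialized() is false without it
            .setRegion(region)         // optional RegionSpecifier
            .build();
        // Serialize and parse back through the generated static parser.
        return CleanupBulkLoadRequest.parseFrom(request.toByteArray());
      }
    }
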
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new 
Builder(); + } + + public Builder clear() { + super.clear(); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.bulkToken_ = bulkToken_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this; + if (other.hasBulkToken()) { + bitField0_ |= 0x00000001; + bulkToken_ = other.bulkToken_; + onChanged(); + } + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasBulkToken()) { + + return false; + } + if (hasRegion()) { + if (!getRegion().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if 
(parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string bulk_token = 1; + private java.lang.Object bulkToken_ = ""; + /** + * required string bulk_token = 1; + */ + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string bulk_token = 1; + */ + public java.lang.String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string bulk_token = 1; + */ + public com.google.protobuf.ByteString + getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string bulk_token = 1; + */ + public Builder setBulkToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + /** + * required string bulk_token = 1; + */ + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000001); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + /** + * required string bulk_token = 1; + */ + public Builder setBulkTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + + // optional .hbase.pb.RegionSpecifier region = 2; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional 
.hbase.pb.RegionSpecifier region = 2; + */ + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + /** + * optional .hbase.pb.RegionSpecifier region = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadRequest) + } + + static { + defaultInstance = new CleanupBulkLoadRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest) + } + + public interface CleanupBulkLoadResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} + */ + public static final class CleanupBulkLoadResponse extends + com.google.protobuf.GeneratedMessage + implements CleanupBulkLoadResponseOrBuilder { + // Use CleanupBulkLoadResponse.newBuilder() to construct. 
+ private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CleanupBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CleanupBulkLoadResponse defaultInstance; + public static CleanupBulkLoadResponse getDefaultInstance() { + return defaultInstance; + } + + public CleanupBulkLoadResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CleanupBulkLoadResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CleanupBulkLoadResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CleanupBulkLoadResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasLoaded()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -22802,9 +26453,6 @@ public final class ClientProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, loaded_); - } getUnknownFields().writeTo(output); } @@ -22814,10 +26462,6 @@ public final class ClientProtos { if 
(size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, loaded_); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -22835,17 +26479,12 @@ public final class ClientProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) obj; boolean result = true; - result = result && (hasLoaded() == other.hasLoaded()); - if (hasLoaded()) { - result = result && (getLoaded() - == other.getLoaded()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -22859,62 +26498,58 @@ public final class ClientProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLoaded()) { - hash = (37 * hash) + LOADED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoaded()); - } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -22923,7 +26558,7 @@ public final class ClientProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -22935,24 +26570,24 @@ public final class ClientProtos { return builder; } /** - * Protobuf type {@code hbase.pb.BulkLoadHFileResponse} + * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -22972,8 +26607,6 @@ public final class ClientProtos { public Builder clear() { super.clear(); - loaded_ = false; - bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -22983,57 +26616,43 @@ public final class ClientProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_BulkLoadHFileResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.loaded_ = loaded_; - result.bitField0_ = to_bitField0_; + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) { 
- if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; - if (other.hasLoaded()) { - setLoaded(other.getLoaded()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasLoaded()) { - - return false; - } return true; } @@ -23041,11 +26660,11 @@ public final class ClientProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -23054,50 +26673,16 @@ public final class ClientProtos { } return this; } - private int bitField0_; - - // required bool loaded = 1; - private boolean loaded_ ; - /** - * required bool loaded = 1; - */ - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bool loaded = 1; - */ - public boolean getLoaded() { - return loaded_; - } - /** - * required bool loaded = 1; - */ - public Builder setLoaded(boolean value) { - bitField0_ |= 0x00000001; - loaded_ = value; - onChanged(); - return this; - } - /** - * required bool loaded = 1; - */ - public Builder clearLoaded() { - bitField0_ = (bitField0_ & ~0x00000001); - loaded_ = false; - onChanged(); - return this; - } - // @@protoc_insertion_point(builder_scope:hbase.pb.BulkLoadHFileResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadResponse) } static { - defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance = new CleanupBulkLoadResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.BulkLoadHFileResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse) } public interface CoprocessorServiceCallOrBuilder @@ -34704,6 +38289,22 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done); /** + * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse); + */ + public abstract void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse); + */ + public abstract void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + /** * rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); */ public 
abstract void execService( @@ -34765,6 +38366,22 @@ public final class ClientProtos { } @java.lang.Override + public void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + impl.prepareBulkLoad(controller, request, done); + } + + @java.lang.Override + public void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + impl.cleanupBulkLoad(controller, request, done); + } + + @java.lang.Override public void execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, @@ -34819,10 +38436,14 @@ public final class ClientProtos { case 3: return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request); case 4: - return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request); case 5: - return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request); case 6: + return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + case 7: + return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); + case 8: return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); default: throw new java.lang.AssertionError("Can't get here."); @@ -34847,10 +38468,14 @@ public final class ClientProtos { case 3: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 8: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -34875,10 +38500,14 @@ public final class ClientProtos { case 3: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 5: - return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 8: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -34921,6 +38550,22 @@ public final class ClientProtos { com.google.protobuf.RpcCallback done); /** + * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse); + */ + public abstract void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse); + */ + public abstract void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + /** * rpc ExecService(.hbase.pb.CoprocessorServiceRequest) returns (.hbase.pb.CoprocessorServiceResponse); */ public abstract void execService( @@ -34987,16 +38632,26 @@ public final class ClientProtos { done)); return; case 4: + this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 5: + case 7: this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); return; - case 6: + case 8: this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request, com.google.protobuf.RpcUtil.specializeCallback( done)); @@ -35024,10 +38679,14 @@ public final class ClientProtos { case 3: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); + case 8: return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -35052,10 +38711,14 @@ public final class ClientProtos { case 3: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(); case 4: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 5: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + case 8: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); @@ -35138,12 +38801,42 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance())); } + public void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance())); + } + + public void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance())); + } + public void execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(4), + getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), @@ -35158,7 +38851,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(5), + getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(), @@ -35173,7 
+38866,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, com.google.protobuf.RpcCallback done) { channel.callMethod( - getDescriptor().getMethods().get(6), + getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(), @@ -35210,6 +38903,16 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request) throws com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) + throws com.google.protobuf.ServiceException; + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) @@ -35281,12 +38984,36 @@ public final class ClientProtos { } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), + getDescriptor().getMethods().get(6), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); @@ -35298,7 +39025,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod( 
- getDescriptor().getMethods().get(5), + getDescriptor().getMethods().get(7), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()); @@ -35310,7 +39037,7 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) throws com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), + getDescriptor().getMethods().get(8), controller, request, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); @@ -35417,6 +39144,31 @@ public final class ClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BulkLoadHFileResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_DelegationToken_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_DelegationToken_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CoprocessorServiceCall_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -35568,66 +39320,81 @@ public final class ClientProtos { "_per_result\030\007 \003(\010\022\036\n\026more_results_in_reg" + "ion\030\010 \001(\010\022\031\n\021heartbeat_message\030\t \001(\010\022+\n\014" + "scan_metrics\030\n \001(\0132\025.hbase.pb.ScanMetric" + - "s\"\305\001\n\024BulkLoadHFileRequest\022)\n\006region\030\001 \002" + + "s\"\206\002\n\024BulkLoadHFileRequest\022)\n\006region\030\001 \002" + "(\0132\031.hbase.pb.RegionSpecifier\022>\n\013family_" + "path\030\002 \003(\0132).hbase.pb.BulkLoadHFileReque" + - "st.FamilyPath\022\026\n\016assign_seq_num\030\003 \001(\010\032*\n" + - "\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(" + - "\t\"\'\n\025BulkLoadHFileResponse\022\016\n\006loaded\030\001 \002" + - "(\010\"a\n\026CoprocessorServiceCall\022\013\n\003row\030\001 \002(", - "\014\022\024\n\014service_name\030\002 \002(\t\022\023\n\013method_name\030\003" + - " \002(\t\022\017\n\007request\030\004 \002(\014\"B\n\030CoprocessorServ" + - "iceResult\022&\n\005value\030\001 \001(\0132\027.hbase.pb.Name" + - "BytesPair\"v\n\031CoprocessorServiceRequest\022)" + - "\n\006region\030\001 
\002(\0132\031.hbase.pb.RegionSpecifie" + - "r\022.\n\004call\030\002 \002(\0132 .hbase.pb.CoprocessorSe" + - "rviceCall\"o\n\032CoprocessorServiceResponse\022" + - ")\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifi" + - "er\022&\n\005value\030\002 \002(\0132\027.hbase.pb.NameBytesPa" + - "ir\"\226\001\n\006Action\022\r\n\005index\030\001 \001(\r\022)\n\010mutation", - "\030\002 \001(\0132\027.hbase.pb.MutationProto\022\032\n\003get\030\003" + - " \001(\0132\r.hbase.pb.Get\0226\n\014service_call\030\004 \001(" + - "\0132 .hbase.pb.CoprocessorServiceCall\"k\n\014R" + - "egionAction\022)\n\006region\030\001 \002(\0132\031.hbase.pb.R" + - "egionSpecifier\022\016\n\006atomic\030\002 \001(\010\022 \n\006action" + - "\030\003 \003(\0132\020.hbase.pb.Action\"c\n\017RegionLoadSt" + - "ats\022\027\n\014memstoreLoad\030\001 \001(\005:\0010\022\030\n\rheapOccu" + - "pancy\030\002 \001(\005:\0010\022\035\n\022compactionPressure\030\003 \001" + - "(\005:\0010\"j\n\024MultiRegionLoadStats\022)\n\006region\030" + - "\001 \003(\0132\031.hbase.pb.RegionSpecifier\022\'\n\004stat", - "\030\002 \003(\0132\031.hbase.pb.RegionLoadStats\"\336\001\n\021Re" + - "sultOrException\022\r\n\005index\030\001 \001(\r\022 \n\006result" + - "\030\002 \001(\0132\020.hbase.pb.Result\022*\n\texception\030\003 " + - "\001(\0132\027.hbase.pb.NameBytesPair\022:\n\016service_" + - "result\030\004 \001(\0132\".hbase.pb.CoprocessorServi" + - "ceResult\0220\n\tloadStats\030\005 \001(\0132\031.hbase.pb.R" + - "egionLoadStatsB\002\030\001\"x\n\022RegionActionResult" + - "\0226\n\021resultOrException\030\001 \003(\0132\033.hbase.pb.R" + - "esultOrException\022*\n\texception\030\002 \001(\0132\027.hb" + - "ase.pb.NameBytesPair\"x\n\014MultiRequest\022,\n\014", - "regionAction\030\001 \003(\0132\026.hbase.pb.RegionActi" + - "on\022\022\n\nnonceGroup\030\002 \001(\004\022&\n\tcondition\030\003 \001(" + - "\0132\023.hbase.pb.Condition\"\226\001\n\rMultiResponse" + - "\0228\n\022regionActionResult\030\001 \003(\0132\034.hbase.pb." 
+ - "RegionActionResult\022\021\n\tprocessed\030\002 \001(\010\0228\n" + - "\020regionStatistics\030\003 \001(\0132\036.hbase.pb.Multi" + - "RegionLoadStats*\'\n\013Consistency\022\n\n\006STRONG" + - "\020\000\022\014\n\010TIMELINE\020\0012\203\004\n\rClientService\0222\n\003Ge" + - "t\022\024.hbase.pb.GetRequest\032\025.hbase.pb.GetRe" + - "sponse\022;\n\006Mutate\022\027.hbase.pb.MutateReques", - "t\032\030.hbase.pb.MutateResponse\0225\n\004Scan\022\025.hb" + - "ase.pb.ScanRequest\032\026.hbase.pb.ScanRespon" + - "se\022P\n\rBulkLoadHFile\022\036.hbase.pb.BulkLoadH" + - "FileRequest\032\037.hbase.pb.BulkLoadHFileResp" + - "onse\022X\n\013ExecService\022#.hbase.pb.Coprocess" + - "orServiceRequest\032$.hbase.pb.CoprocessorS" + - "erviceResponse\022d\n\027ExecRegionServerServic" + - "e\022#.hbase.pb.CoprocessorServiceRequest\032$" + - ".hbase.pb.CoprocessorServiceResponse\0228\n\005" + - "Multi\022\026.hbase.pb.MultiRequest\032\027.hbase.pb", - ".MultiResponseBB\n*org.apache.hadoop.hbas" + - "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" + - "\001\001" + "st.FamilyPath\022\026\n\016assign_seq_num\030\003 \001(\010\022+\n" + + "\010fs_token\030\004 \001(\0132\031.hbase.pb.DelegationTok" + + "en\022\022\n\nbulk_token\030\005 \001(\t\032*\n\nFamilyPath\022\016\n\006" + + "family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHF", + "ileResponse\022\016\n\006loaded\030\001 \002(\010\"V\n\017Delegatio" + + "nToken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002" + + " \001(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"l\n\026P" + + "repareBulkLoadRequest\022\'\n\ntable_name\030\001 \002(" + + "\0132\023.hbase.pb.TableName\022)\n\006region\030\002 \001(\0132\031" + + ".hbase.pb.RegionSpecifier\"-\n\027PrepareBulk" + + "LoadResponse\022\022\n\nbulk_token\030\001 \002(\t\"W\n\026Clea" + + "nupBulkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\022)" + + "\n\006region\030\002 \001(\0132\031.hbase.pb.RegionSpecifie" + + "r\"\031\n\027CleanupBulkLoadResponse\"a\n\026Coproces", + "sorServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_n" + + "ame\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007reques" + + "t\030\004 \002(\014\"B\n\030CoprocessorServiceResult\022&\n\005v" + + "alue\030\001 \001(\0132\027.hbase.pb.NameBytesPair\"v\n\031C" + + "oprocessorServiceRequest\022)\n\006region\030\001 \002(\013" + + "2\031.hbase.pb.RegionSpecifier\022.\n\004call\030\002 \002(" + + "\0132 .hbase.pb.CoprocessorServiceCall\"o\n\032C" + + "oprocessorServiceResponse\022)\n\006region\030\001 \002(" + + "\0132\031.hbase.pb.RegionSpecifier\022&\n\005value\030\002 " + + "\002(\0132\027.hbase.pb.NameBytesPair\"\226\001\n\006Action\022", + "\r\n\005index\030\001 \001(\r\022)\n\010mutation\030\002 \001(\0132\027.hbase" + + ".pb.MutationProto\022\032\n\003get\030\003 \001(\0132\r.hbase.p" + + "b.Get\0226\n\014service_call\030\004 \001(\0132 .hbase.pb.C" + + "oprocessorServiceCall\"k\n\014RegionAction\022)\n" + + "\006region\030\001 \002(\0132\031.hbase.pb.RegionSpecifier" + + "\022\016\n\006atomic\030\002 \001(\010\022 \n\006action\030\003 \003(\0132\020.hbase" + + ".pb.Action\"c\n\017RegionLoadStats\022\027\n\014memstor" + + "eLoad\030\001 \001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(\005:\0010" + + "\022\035\n\022compactionPressure\030\003 \001(\005:\0010\"j\n\024Multi" + + "RegionLoadStats\022)\n\006region\030\001 \003(\0132\031.hbase.", + "pb.RegionSpecifier\022\'\n\004stat\030\002 
\003(\0132\031.hbase" + + ".pb.RegionLoadStats\"\336\001\n\021ResultOrExceptio" + + "n\022\r\n\005index\030\001 \001(\r\022 \n\006result\030\002 \001(\0132\020.hbase" + + ".pb.Result\022*\n\texception\030\003 \001(\0132\027.hbase.pb" + + ".NameBytesPair\022:\n\016service_result\030\004 \001(\0132\"" + + ".hbase.pb.CoprocessorServiceResult\0220\n\tlo" + + "adStats\030\005 \001(\0132\031.hbase.pb.RegionLoadStats" + + "B\002\030\001\"x\n\022RegionActionResult\0226\n\021resultOrEx" + + "ception\030\001 \003(\0132\033.hbase.pb.ResultOrExcepti" + + "on\022*\n\texception\030\002 \001(\0132\027.hbase.pb.NameByt", + "esPair\"x\n\014MultiRequest\022,\n\014regionAction\030\001" + + " \003(\0132\026.hbase.pb.RegionAction\022\022\n\nnonceGro" + + "up\030\002 \001(\004\022&\n\tcondition\030\003 \001(\0132\023.hbase.pb.C" + + "ondition\"\226\001\n\rMultiResponse\0228\n\022regionActi" + + "onResult\030\001 \003(\0132\034.hbase.pb.RegionActionRe" + + "sult\022\021\n\tprocessed\030\002 \001(\010\0228\n\020regionStatist" + + "ics\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStat" + + "s*\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE" + + "\020\0012\263\005\n\rClientService\0222\n\003Get\022\024.hbase.pb.G" + + "etRequest\032\025.hbase.pb.GetResponse\022;\n\006Muta", + "te\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.M" + + "utateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReq" + + "uest\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoad" + + "HFile\022\036.hbase.pb.BulkLoadHFileRequest\032\037." + + "hbase.pb.BulkLoadHFileResponse\022V\n\017Prepar" + + "eBulkLoad\022 .hbase.pb.PrepareBulkLoadRequ" + + "est\032!.hbase.pb.PrepareBulkLoadResponse\022V" + + "\n\017CleanupBulkLoad\022 .hbase.pb.CleanupBulk" + + "LoadRequest\032!.hbase.pb.CleanupBulkLoadRe" + + "sponse\022X\n\013ExecService\022#.hbase.pb.Coproce", + "ssorServiceRequest\032$.hbase.pb.Coprocesso" + + "rServiceResponse\022d\n\027ExecRegionServerServ" + + "ice\022#.hbase.pb.CoprocessorServiceRequest" + + "\032$.hbase.pb.CoprocessorServiceResponse\0228" + + "\n\005Multi\022\026.hbase.pb.MultiRequest\032\027.hbase." 
+ + "pb.MultiResponseBB\n*org.apache.hadoop.hb" + + "ase.protobuf.generatedB\014ClientProtosH\001\210\001" + + "\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -35735,7 +39502,7 @@ public final class ClientProtos { internal_static_hbase_pb_BulkLoadHFileRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_BulkLoadHFileRequest_descriptor, - new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", }); + new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", }); internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_descriptor = internal_static_hbase_pb_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); internal_static_hbase_pb_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new @@ -35748,74 +39515,104 @@ public final class ClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_BulkLoadHFileResponse_descriptor, new java.lang.String[] { "Loaded", }); - internal_static_hbase_pb_CoprocessorServiceCall_descriptor = + internal_static_hbase_pb_DelegationToken_descriptor = getDescriptor().getMessageTypes().get(16); + internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_DelegationToken_descriptor, + new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); + internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor, + new java.lang.String[] { "TableName", "Region", }); + internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor, + new java.lang.String[] { "BulkToken", }); + internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor, + new java.lang.String[] { "BulkToken", "Region", }); + internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CoprocessorServiceCall_descriptor = + getDescriptor().getMessageTypes().get(21); internal_static_hbase_pb_CoprocessorServiceCall_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceCall_descriptor, new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }); internal_static_hbase_pb_CoprocessorServiceResult_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(22); 
internal_static_hbase_pb_CoprocessorServiceResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceResult_descriptor, new java.lang.String[] { "Value", }); internal_static_hbase_pb_CoprocessorServiceRequest_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(23); internal_static_hbase_pb_CoprocessorServiceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceRequest_descriptor, new java.lang.String[] { "Region", "Call", }); internal_static_hbase_pb_CoprocessorServiceResponse_descriptor = - getDescriptor().getMessageTypes().get(19); + getDescriptor().getMessageTypes().get(24); internal_static_hbase_pb_CoprocessorServiceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CoprocessorServiceResponse_descriptor, new java.lang.String[] { "Region", "Value", }); internal_static_hbase_pb_Action_descriptor = - getDescriptor().getMessageTypes().get(20); + getDescriptor().getMessageTypes().get(25); internal_static_hbase_pb_Action_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_Action_descriptor, new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", }); internal_static_hbase_pb_RegionAction_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(26); internal_static_hbase_pb_RegionAction_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RegionAction_descriptor, new java.lang.String[] { "Region", "Atomic", "Action", }); internal_static_hbase_pb_RegionLoadStats_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(27); internal_static_hbase_pb_RegionLoadStats_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RegionLoadStats_descriptor, new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", }); internal_static_hbase_pb_MultiRegionLoadStats_descriptor = - getDescriptor().getMessageTypes().get(23); + getDescriptor().getMessageTypes().get(28); internal_static_hbase_pb_MultiRegionLoadStats_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_MultiRegionLoadStats_descriptor, new java.lang.String[] { "Region", "Stat", }); internal_static_hbase_pb_ResultOrException_descriptor = - getDescriptor().getMessageTypes().get(24); + getDescriptor().getMessageTypes().get(29); internal_static_hbase_pb_ResultOrException_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ResultOrException_descriptor, new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", }); internal_static_hbase_pb_RegionActionResult_descriptor = - getDescriptor().getMessageTypes().get(25); + getDescriptor().getMessageTypes().get(30); internal_static_hbase_pb_RegionActionResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RegionActionResult_descriptor, new java.lang.String[] { "ResultOrException", "Exception", }); internal_static_hbase_pb_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(26); + getDescriptor().getMessageTypes().get(31); 
internal_static_hbase_pb_MultiRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_MultiRequest_descriptor, new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", }); internal_static_hbase_pb_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(27); + getDescriptor().getMessageTypes().get(32); internal_static_hbase_pb_MultiResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_MultiResponse_descriptor, diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java index 538e031..29c49de 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java @@ -54,11 +54,11 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken(); /** * required .hbase.pb.DelegationToken fs_token = 3; */ - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); // required string bulk_token = 4; /** @@ -140,11 +140,11 @@ public final class SecureBulkLoadProtos { break; } case 26: { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = fsToken_.toBuilder(); } - fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.PARSER, extensionRegistry); + fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(fsToken_); fsToken_ = subBuilder.buildPartial(); @@ -254,7 +254,7 @@ public final class SecureBulkLoadProtos { // required .hbase.pb.DelegationToken fs_token = 3; public static final int FS_TOKEN_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken fsToken_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_; /** * required .hbase.pb.DelegationToken fs_token = 3; */ @@ -264,13 +264,13 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { return fsToken_; } /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { return fsToken_; } @@ -320,7 +320,7 @@ public final class SecureBulkLoadProtos { private void initFields() { familyPath_ = 
java.util.Collections.emptyList(); assignSeqNum_ = false; - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); bulkToken_ = ""; } private byte memoizedIsInitialized = -1; @@ -575,7 +575,7 @@ public final class SecureBulkLoadProtos { assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); } else { fsTokenBuilder_.clear(); } @@ -1003,9 +1003,9 @@ public final class SecureBulkLoadProtos { } // required .hbase.pb.DelegationToken fs_token = 3; - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder> fsTokenBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; /** * required .hbase.pb.DelegationToken fs_token = 3; */ @@ -1015,7 +1015,7 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { if (fsTokenBuilder_ == null) { return fsToken_; } else { @@ -1025,7 +1025,7 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken value) { + public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1042,7 +1042,7 @@ public final class SecureBulkLoadProtos { * required .hbase.pb.DelegationToken fs_token = 3; */ public Builder setFsToken( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) { if (fsTokenBuilder_ == null) { fsToken_ = builderForValue.build(); onChanged(); @@ -1055,12 +1055,12 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken value) { + public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (((bitField0_ & 
0x00000004) == 0x00000004) && - fsToken_ != org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance()) { + fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { fsToken_ = - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); } else { fsToken_ = value; } @@ -1076,7 +1076,7 @@ public final class SecureBulkLoadProtos { */ public Builder clearFsToken() { if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); onChanged(); } else { fsTokenBuilder_.clear(); @@ -1087,7 +1087,7 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder getFsTokenBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() { bitField0_ |= 0x00000004; onChanged(); return getFsTokenFieldBuilder().getBuilder(); @@ -1095,7 +1095,7 @@ public final class SecureBulkLoadProtos { /** * required .hbase.pb.DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { if (fsTokenBuilder_ != null) { return fsTokenBuilder_.getMessageOrBuilder(); } else { @@ -1106,11 +1106,11 @@ public final class SecureBulkLoadProtos { * required .hbase.pb.DelegationToken fs_token = 3; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> getFsTokenFieldBuilder() { if (fsTokenBuilder_ == null) { fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( fsToken_, getParentForChildren(), isClean()); @@ -1646,2799 +1646,6 @@ public final class SecureBulkLoadProtos { // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesResponse) } - public interface DelegationTokenOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes identifier = 1; - /** - * optional bytes identifier = 1; - */ - boolean hasIdentifier(); - /** - * optional bytes 
identifier = 1; - */ - com.google.protobuf.ByteString getIdentifier(); - - // optional bytes password = 2; - /** - * optional bytes password = 2; - */ - boolean hasPassword(); - /** - * optional bytes password = 2; - */ - com.google.protobuf.ByteString getPassword(); - - // optional string kind = 3; - /** - * optional string kind = 3; - */ - boolean hasKind(); - /** - * optional string kind = 3; - */ - java.lang.String getKind(); - /** - * optional string kind = 3; - */ - com.google.protobuf.ByteString - getKindBytes(); - - // optional string service = 4; - /** - * optional string service = 4; - */ - boolean hasService(); - /** - * optional string service = 4; - */ - java.lang.String getService(); - /** - * optional string service = 4; - */ - com.google.protobuf.ByteString - getServiceBytes(); - } - /** - * Protobuf type {@code hbase.pb.DelegationToken} - */ - public static final class DelegationToken extends - com.google.protobuf.GeneratedMessage - implements DelegationTokenOrBuilder { - // Use DelegationToken.newBuilder() to construct. - private DelegationToken(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DelegationToken defaultInstance; - public static DelegationToken getDefaultInstance() { - return defaultInstance; - } - - public DelegationToken getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DelegationToken( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - identifier_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - password_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - kind_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000008; - service_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_DelegationToken_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable - 
.ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DelegationToken parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DelegationToken(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional bytes identifier = 1; - public static final int IDENTIFIER_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString identifier_; - /** - * optional bytes identifier = 1; - */ - public boolean hasIdentifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional bytes identifier = 1; - */ - public com.google.protobuf.ByteString getIdentifier() { - return identifier_; - } - - // optional bytes password = 2; - public static final int PASSWORD_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString password_; - /** - * optional bytes password = 2; - */ - public boolean hasPassword() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes password = 2; - */ - public com.google.protobuf.ByteString getPassword() { - return password_; - } - - // optional string kind = 3; - public static final int KIND_FIELD_NUMBER = 3; - private java.lang.Object kind_; - /** - * optional string kind = 3; - */ - public boolean hasKind() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional string kind = 3; - */ - public java.lang.String getKind() { - java.lang.Object ref = kind_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - kind_ = s; - } - return s; - } - } - /** - * optional string kind = 3; - */ - public com.google.protobuf.ByteString - getKindBytes() { - java.lang.Object ref = kind_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - kind_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string service = 4; - public static final int SERVICE_FIELD_NUMBER = 4; - private java.lang.Object service_; - /** - * optional string service = 4; - */ - public boolean hasService() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional string service = 4; - */ - public java.lang.String getService() { - java.lang.Object ref = service_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - service_ = s; - } - return s; - } - } - /** - * optional string service = 4; - */ - public com.google.protobuf.ByteString - getServiceBytes() { - java.lang.Object ref = service_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - service_ = b; - return b; - } else { - return 
(com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - identifier_ = com.google.protobuf.ByteString.EMPTY; - password_ = com.google.protobuf.ByteString.EMPTY; - kind_ = ""; - service_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, identifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, password_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getKindBytes()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getServiceBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, identifier_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, password_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getKindBytes()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getServiceBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) obj; - - boolean result = true; - result = result && (hasIdentifier() == other.hasIdentifier()); - if (hasIdentifier()) { - result = result && getIdentifier() - .equals(other.getIdentifier()); - } - result = result && (hasPassword() == other.hasPassword()); - if (hasPassword()) { - result = result && getPassword() - .equals(other.getPassword()); - } - result = result && (hasKind() == other.hasKind()); - if (hasKind()) { - result = result && getKind() - .equals(other.getKind()); - } - result = result && (hasService() == other.hasService()); - if (hasService()) { - result = result && getService() - .equals(other.getService()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasIdentifier()) { - hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; - hash = (53 * hash) + getIdentifier().hashCode(); - } - if (hasPassword()) 
{ - hash = (37 * hash) + PASSWORD_FIELD_NUMBER; - hash = (53 * hash) + getPassword().hashCode(); - } - if (hasKind()) { - hash = (37 * hash) + KIND_FIELD_NUMBER; - hash = (53 * hash) + getKind().hashCode(); - } - if (hasService()) { - hash = (37 * hash) + SERVICE_FIELD_NUMBER; - hash = (53 * hash) + getService().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { 
- Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.DelegationToken} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_DelegationToken_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_DelegationToken_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - identifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - password_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - kind_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - service_ = ""; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_DelegationToken_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.identifier_ = identifier_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.password_ = password_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.kind_ = kind_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.service_ = 
service_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance()) return this; - if (other.hasIdentifier()) { - setIdentifier(other.getIdentifier()); - } - if (other.hasPassword()) { - setPassword(other.getPassword()); - } - if (other.hasKind()) { - bitField0_ |= 0x00000004; - kind_ = other.kind_; - onChanged(); - } - if (other.hasService()) { - bitField0_ |= 0x00000008; - service_ = other.service_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional bytes identifier = 1; - private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes identifier = 1; - */ - public boolean hasIdentifier() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional bytes identifier = 1; - */ - public com.google.protobuf.ByteString getIdentifier() { - return identifier_; - } - /** - * optional bytes identifier = 1; - */ - public Builder setIdentifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - identifier_ = value; - onChanged(); - return this; - } - /** - * optional bytes identifier = 1; - */ - public Builder clearIdentifier() { - bitField0_ = (bitField0_ & ~0x00000001); - identifier_ = getDefaultInstance().getIdentifier(); - onChanged(); - return this; - } - - // optional bytes password = 2; - private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes password = 2; - */ - public boolean hasPassword() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes password = 2; - */ - public com.google.protobuf.ByteString getPassword() { - return password_; - } - /** - * optional bytes password = 2; - */ - public Builder setPassword(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - password_ = value; - onChanged(); - return this; - } - /** - * optional bytes password = 2; - */ - public Builder clearPassword() { - bitField0_ = (bitField0_ & ~0x00000002); - password_ = getDefaultInstance().getPassword(); - onChanged(); - return this; - } - 
- // optional string kind = 3; - private java.lang.Object kind_ = ""; - /** - * optional string kind = 3; - */ - public boolean hasKind() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional string kind = 3; - */ - public java.lang.String getKind() { - java.lang.Object ref = kind_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - kind_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string kind = 3; - */ - public com.google.protobuf.ByteString - getKindBytes() { - java.lang.Object ref = kind_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - kind_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string kind = 3; - */ - public Builder setKind( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - kind_ = value; - onChanged(); - return this; - } - /** - * optional string kind = 3; - */ - public Builder clearKind() { - bitField0_ = (bitField0_ & ~0x00000004); - kind_ = getDefaultInstance().getKind(); - onChanged(); - return this; - } - /** - * optional string kind = 3; - */ - public Builder setKindBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - kind_ = value; - onChanged(); - return this; - } - - // optional string service = 4; - private java.lang.Object service_ = ""; - /** - * optional string service = 4; - */ - public boolean hasService() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional string service = 4; - */ - public java.lang.String getService() { - java.lang.Object ref = service_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - service_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string service = 4; - */ - public com.google.protobuf.ByteString - getServiceBytes() { - java.lang.Object ref = service_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - service_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string service = 4; - */ - public Builder setService( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - service_ = value; - onChanged(); - return this; - } - /** - * optional string service = 4; - */ - public Builder clearService() { - bitField0_ = (bitField0_ & ~0x00000008); - service_ = getDefaultInstance().getService(); - onChanged(); - return this; - } - /** - * optional string service = 4; - */ - public Builder setServiceBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - service_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.DelegationToken) - } - - static { - defaultInstance = new DelegationToken(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.DelegationToken) - } - - public interface PrepareBulkLoadRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required 
.hbase.pb.TableName table_name = 1; - /** - * required .hbase.pb.TableName table_name = 1; - */ - boolean hasTableName(); - /** - * required .hbase.pb.TableName table_name = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); - /** - * required .hbase.pb.TableName table_name = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); - } - /** - * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} - */ - public static final class PrepareBulkLoadRequest extends - com.google.protobuf.GeneratedMessage - implements PrepareBulkLoadRequestOrBuilder { - // Use PrepareBulkLoadRequest.newBuilder() to construct. - private PrepareBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PrepareBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PrepareBulkLoadRequest defaultInstance; - public static PrepareBulkLoadRequest getDefaultInstance() { - return defaultInstance; - } - - public PrepareBulkLoadRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private PrepareBulkLoadRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = tableName_.toBuilder(); - } - tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(tableName_); - tableName_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PrepareBulkLoadRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PrepareBulkLoadRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required .hbase.pb.TableName table_name = 1; - public static final int TABLE_NAME_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; - /** - * required .hbase.pb.TableName table_name = 1; - */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { - return tableName_; - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - return tableName_; - } - - private void initFields() { - tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!getTableName().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, tableName_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, tableName_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int 
hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.PrepareBulkLoadRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTableNameFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - } else { - tableNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (tableNameBuilder_ == null) { - result.tableName_ = tableName_; - } else { - result.tableName_ = tableNameBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if 
(other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this; - if (other.hasTableName()) { - mergeTableName(other.getTableName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!getTableName().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required .hbase.pb.TableName table_name = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; - /** - * required .hbase.pb.TableName table_name = 1; - */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { - if (tableNameBuilder_ == null) { - return tableName_; - } else { - return tableNameBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { - if (tableNameBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - tableName_ = value; - onChanged(); - } else { - tableNameBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public Builder setTableName( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { - if (tableNameBuilder_ == null) { - tableName_ = builderForValue.build(); - onChanged(); - } else { - tableNameBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { - if (tableNameBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - tableName_ != 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { - tableName_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); - } else { - tableName_ = value; - } - onChanged(); - } else { - tableNameBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public Builder clearTableName() { - if (tableNameBuilder_ == null) { - tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); - onChanged(); - } else { - tableNameBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getTableNameFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { - if (tableNameBuilder_ != null) { - return tableNameBuilder_.getMessageOrBuilder(); - } else { - return tableName_; - } - } - /** - * required .hbase.pb.TableName table_name = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> - getTableNameFieldBuilder() { - if (tableNameBuilder_ == null) { - tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( - tableName_, - getParentForChildren(), - isClean()); - tableName_ = null; - } - return tableNameBuilder_; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadRequest) - } - - static { - defaultInstance = new PrepareBulkLoadRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadRequest) - } - - public interface PrepareBulkLoadResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string bulk_token = 1; - /** - * required string bulk_token = 1; - */ - boolean hasBulkToken(); - /** - * required string bulk_token = 1; - */ - java.lang.String getBulkToken(); - /** - * required string bulk_token = 1; - */ - com.google.protobuf.ByteString - getBulkTokenBytes(); - } - /** - * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} - */ - public static final class PrepareBulkLoadResponse extends - com.google.protobuf.GeneratedMessage - implements PrepareBulkLoadResponseOrBuilder { - // Use PrepareBulkLoadResponse.newBuilder() to construct. 
- private PrepareBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PrepareBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PrepareBulkLoadResponse defaultInstance; - public static PrepareBulkLoadResponse getDefaultInstance() { - return defaultInstance; - } - - public PrepareBulkLoadResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private PrepareBulkLoadResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PrepareBulkLoadResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PrepareBulkLoadResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required string bulk_token = 1; - public static final int BULK_TOKEN_FIELD_NUMBER = 1; - private java.lang.Object bulkToken_; - /** - * required string bulk_token = 1; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string bulk_token = 1; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - 
com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - bulkToken_ = s; - } - return s; - } - } - /** - * required string bulk_token = 1; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - bulkToken_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasBulkToken()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getBulkTokenBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getBulkTokenBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) obj; - - boolean result = true; - result = result && (hasBulkToken() == other.hasBulkToken()); - if (hasBulkToken()) { - result = result && getBulkToken() - .equals(other.getBulkToken()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasBulkToken()) { - hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getBulkToken().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.PrepareBulkLoadResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - bulkToken_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.bulkToken_ = bulkToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this; - if (other.hasBulkToken()) { - bitField0_ |= 0x00000001; - bulkToken_ = other.bulkToken_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasBulkToken()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string bulk_token = 1; - private java.lang.Object bulkToken_ = ""; - /** - * required string bulk_token = 1; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string bulk_token = 1; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string bulk_token = 1; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string bulk_token = 1; - */ - public Builder setBulkToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - bulkToken_ = value; - onChanged(); - return this; - } - /** - * required string bulk_token = 1; - */ - public Builder clearBulkToken() { - bitField0_ = (bitField0_ & ~0x00000001); - bulkToken_ = getDefaultInstance().getBulkToken(); - onChanged(); - return this; - } - /** - * required string bulk_token = 1; - */ - public Builder setBulkTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - bulkToken_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.PrepareBulkLoadResponse) - } - - static { - defaultInstance = new PrepareBulkLoadResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.PrepareBulkLoadResponse) - } - - public interface CleanupBulkLoadRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string bulk_token = 1; - /** - * required string bulk_token = 1; - */ - boolean hasBulkToken(); - /** - * required string bulk_token = 1; - */ - java.lang.String getBulkToken(); - /** - * required string bulk_token = 1; - */ - com.google.protobuf.ByteString - getBulkTokenBytes(); - } - /** - * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} - */ - public static final class CleanupBulkLoadRequest extends - com.google.protobuf.GeneratedMessage - implements CleanupBulkLoadRequestOrBuilder { - // Use CleanupBulkLoadRequest.newBuilder() to construct. 
- private CleanupBulkLoadRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CleanupBulkLoadRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CleanupBulkLoadRequest defaultInstance; - public static CleanupBulkLoadRequest getDefaultInstance() { - return defaultInstance; - } - - public CleanupBulkLoadRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CleanupBulkLoadRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - bulkToken_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CleanupBulkLoadRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CleanupBulkLoadRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required string bulk_token = 1; - public static final int BULK_TOKEN_FIELD_NUMBER = 1; - private java.lang.Object bulkToken_; - /** - * required string bulk_token = 1; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string bulk_token = 1; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - 
(com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - bulkToken_ = s; - } - return s; - } - } - /** - * required string bulk_token = 1; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - bulkToken_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasBulkToken()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getBulkTokenBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getBulkTokenBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) obj; - - boolean result = true; - result = result && (hasBulkToken() == other.hasBulkToken()); - if (hasBulkToken()) { - result = result && getBulkToken() - .equals(other.getBulkToken()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasBulkToken()) { - hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getBulkToken().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - 
} - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CleanupBulkLoadRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - bulkToken_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.bulkToken_ = bulkToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this; - if (other.hasBulkToken()) { - bitField0_ |= 0x00000001; - bulkToken_ = other.bulkToken_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasBulkToken()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string bulk_token = 1; - private java.lang.Object bulkToken_ = ""; - /** - * required string bulk_token = 1; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string bulk_token = 1; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string bulk_token = 1; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string bulk_token = 1; - */ - public Builder setBulkToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - bulkToken_ = value; - onChanged(); - return this; - } - /** - * required string bulk_token = 1; - */ - public Builder clearBulkToken() { - bitField0_ = (bitField0_ & ~0x00000001); - bulkToken_ = getDefaultInstance().getBulkToken(); - onChanged(); - return this; - } - /** - * required string bulk_token = 1; - */ - public Builder setBulkTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - bulkToken_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadRequest) - } - - static { - defaultInstance = new CleanupBulkLoadRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadRequest) - } - - public interface CleanupBulkLoadResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} - */ - public static final class CleanupBulkLoadResponse extends - com.google.protobuf.GeneratedMessage - implements CleanupBulkLoadResponseOrBuilder { - // Use CleanupBulkLoadResponse.newBuilder() to construct. 
- private CleanupBulkLoadResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CleanupBulkLoadResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CleanupBulkLoadResponse defaultInstance; - public static CleanupBulkLoadResponse getDefaultInstance() { - return defaultInstance; - } - - public CleanupBulkLoadResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CleanupBulkLoadResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CleanupBulkLoadResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CleanupBulkLoadResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - 
memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - 
public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CleanupBulkLoadResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CleanupBulkLoadResponse) - } - - static { - defaultInstance = new CleanupBulkLoadResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CleanupBulkLoadResponse) - } - /** * Protobuf service {@code hbase.pb.SecureBulkLoadService} */ @@ -4452,8 +1659,8 @@ public final class SecureBulkLoadProtos { */ public abstract void prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); /** * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse); @@ -4468,8 +1675,8 @@ public final class SecureBulkLoadProtos { */ public abstract void cleanupBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); } @@ -4479,8 +1686,8 @@ public final class SecureBulkLoadProtos { @java.lang.Override public void prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { impl.prepareBulkLoad(controller, request, done); } @@ -4495,8 +1702,8 @@ public final class SecureBulkLoadProtos { @java.lang.Override public void cleanupBulkLoad( com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { impl.cleanupBulkLoad(controller, request, done); } @@ -4523,11 +1730,11 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)request); + return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request); case 1: return impl.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request); case 2: - return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)request); + return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -4543,11 +1750,11 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -4563,11 +1770,11 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -4581,8 +1788,8 @@ public final class SecureBulkLoadProtos { */ public abstract void prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); /** * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse); @@ -4597,8 +1804,8 @@ public final class SecureBulkLoadProtos { */ public abstract void cleanupBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback 
done); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); public static final com.google.protobuf.Descriptors.ServiceDescriptor @@ -4623,8 +1830,8 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 1: @@ -4633,8 +1840,8 @@ public final class SecureBulkLoadProtos { done)); return; case 2: - this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; default: @@ -4652,11 +1859,11 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -4672,11 +1879,11 @@ public final class SecureBulkLoadProtos { } switch(method.getIndex()) { case 0: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); case 1: return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -4700,17 +1907,17 @@ public final class SecureBulkLoadProtos { public void prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, - 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance())); } public void secureBulkLoadHFiles( @@ -4730,17 +1937,17 @@ public final class SecureBulkLoadProtos { public void cleanupBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance())); } } @@ -4750,9 +1957,9 @@ public final class SecureBulkLoadProtos { } public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( @@ -4760,9 +1967,9 @@ public final class SecureBulkLoadProtos { org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse cleanupBulkLoad( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) throws com.google.protobuf.ServiceException; } @@ -4773,15 +1980,15 @@ public final class SecureBulkLoadProtos { private final com.google.protobuf.BlockingRpcChannel channel; - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) throws 
com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()); } @@ -4797,15 +2004,15 @@ public final class SecureBulkLoadProtos { } - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse cleanupBulkLoad( + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()); } } @@ -4823,31 +2030,6 @@ public final class SecureBulkLoadProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_DelegationToken_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_DelegationToken_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -4864,23 +2046,16 @@ public final class SecureBulkLoadProtos { "ssign_seq_num\030\002 \001(\010\022+\n\010fs_token\030\003 \002(\0132\031." 
+ "hbase.pb.DelegationToken\022\022\n\nbulk_token\030\004" + " \002(\t\".\n\034SecureBulkLoadHFilesResponse\022\016\n\006" + - "loaded\030\001 \002(\010\"V\n\017DelegationToken\022\022\n\nident" + - "ifier\030\001 \001(\014\022\020\n\010password\030\002 \001(\014\022\014\n\004kind\030\003 " + - "\001(\t\022\017\n\007service\030\004 \001(\t\"A\n\026PrepareBulkLoadR", - "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" + - "bleName\"-\n\027PrepareBulkLoadResponse\022\022\n\nbu" + - "lk_token\030\001 \002(\t\",\n\026CleanupBulkLoadRequest" + - "\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027CleanupBulkLoadR" + - "esponse2\256\002\n\025SecureBulkLoadService\022V\n\017Pre" + - "pareBulkLoad\022 .hbase.pb.PrepareBulkLoadR" + - "equest\032!.hbase.pb.PrepareBulkLoadRespons" + - "e\022e\n\024SecureBulkLoadHFiles\022%.hbase.pb.Sec" + - "ureBulkLoadHFilesRequest\032&.hbase.pb.Secu" + - "reBulkLoadHFilesResponse\022V\n\017CleanupBulkL", - "oad\022 .hbase.pb.CleanupBulkLoadRequest\032!." + - "hbase.pb.CleanupBulkLoadResponseBJ\n*org." + - "apache.hadoop.hbase.protobuf.generatedB\024" + - "SecureBulkLoadProtosH\001\210\001\001\240\001\001" + "loaded\030\001 \002(\0102\256\002\n\025SecureBulkLoadService\022V" + + "\n\017PrepareBulkLoad\022 .hbase.pb.PrepareBulk" + + "LoadRequest\032!.hbase.pb.PrepareBulkLoadRe", + "sponse\022e\n\024SecureBulkLoadHFiles\022%.hbase.p" + + "b.SecureBulkLoadHFilesRequest\032&.hbase.pb" + + ".SecureBulkLoadHFilesResponse\022V\n\017Cleanup" + + "BulkLoad\022 .hbase.pb.CleanupBulkLoadReque" + + "st\032!.hbase.pb.CleanupBulkLoadResponseBJ\n" + + "*org.apache.hadoop.hbase.protobuf.genera" + + "tedB\024SecureBulkLoadProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -4899,36 +2074,6 @@ public final class SecureBulkLoadProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor, new java.lang.String[] { "Loaded", }); - internal_static_hbase_pb_DelegationToken_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_hbase_pb_DelegationToken_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_DelegationToken_descriptor, - new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); - internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_hbase_pb_PrepareBulkLoadRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_PrepareBulkLoadRequest_descriptor, - new java.lang.String[] { "TableName", }); - internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_hbase_pb_PrepareBulkLoadResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_PrepareBulkLoadResponse_descriptor, - new java.lang.String[] { "BulkToken", }); - internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_hbase_pb_CleanupBulkLoadRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CleanupBulkLoadRequest_descriptor, - new java.lang.String[] { "BulkToken", }); - 
internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_hbase_pb_CleanupBulkLoadResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CleanupBulkLoadResponse_descriptor, - new java.lang.String[] { }); return null; } }; diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto index 8a4d459..adb66f7 100644 --- a/hbase-protocol/src/main/protobuf/Client.proto +++ b/hbase-protocol/src/main/protobuf/Client.proto @@ -337,6 +337,8 @@ message BulkLoadHFileRequest { required RegionSpecifier region = 1; repeated FamilyPath family_path = 2; optional bool assign_seq_num = 3; + optional DelegationToken fs_token = 4; + optional string bulk_token = 5; message FamilyPath { required bytes family = 1; @@ -348,6 +350,30 @@ message BulkLoadHFileResponse { required bool loaded = 1; } +message DelegationToken { + optional bytes identifier = 1; + optional bytes password = 2; + optional string kind = 3; + optional string service = 4; +} + +message PrepareBulkLoadRequest { + required TableName table_name = 1; + optional RegionSpecifier region = 2; +} + +message PrepareBulkLoadResponse { + required string bulk_token = 1; +} + +message CleanupBulkLoadRequest { + required string bulk_token = 1; + optional RegionSpecifier region = 2; +} + +message CleanupBulkLoadResponse { +} + message CoprocessorServiceCall { required bytes row = 1; required string service_name = 2; @@ -467,6 +493,12 @@ service ClientService { rpc BulkLoadHFile(BulkLoadHFileRequest) returns(BulkLoadHFileResponse); + rpc PrepareBulkLoad(PrepareBulkLoadRequest) + returns (PrepareBulkLoadResponse); + + rpc CleanupBulkLoad(CleanupBulkLoadRequest) + returns (CleanupBulkLoadResponse); + rpc ExecService(CoprocessorServiceRequest) returns(CoprocessorServiceResponse); diff --git a/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto index 814735b..290355e 100644 --- a/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto +++ b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto @@ -37,29 +37,6 @@ message SecureBulkLoadHFilesResponse { required bool loaded = 1; } -message DelegationToken { - optional bytes identifier = 1; - optional bytes password = 2; - optional string kind = 3; - optional string service = 4; -} - -message PrepareBulkLoadRequest { - required TableName table_name = 1; -} - -message PrepareBulkLoadResponse { - required string bulk_token = 1; -} - -message CleanupBulkLoadRequest { - required string bulk_token = 1; - -} - -message CleanupBulkLoadResponse { -} - service SecureBulkLoadService { rpc PrepareBulkLoad(PrepareBulkLoadRequest) returns (PrepareBulkLoadResponse); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java index c7f0b90..1095d6c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java @@ -25,8 +25,8 @@ import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; -import 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; /** * Coprocessors implement this interface to observe and mediate bulk load operations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java index e937569..37c344b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java @@ -334,6 +334,26 @@ public abstract class CoprocessorHost { } /** + * Find list of CoprocessorEnvironment that extend/implement the given class/interface + * @param cls the class/interface to look for + * @return the list of CoprocessorEnvironment, or null if not found + */ + public List findCoprocessorEnvironment(Class cls) { + ArrayList ret = new ArrayList(); + + for (E env: coprocessors) { + Coprocessor cp = env.getInstance(); + + if(cp != null) { + if (cls.isAssignableFrom(cp.getClass())) { + ret.add(env); + } + } + } + return ret; + } + + /** * Find a coprocessor environment by class name * @param className the class name * @return the coprocessor, or null if not found diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index a23d739..c04794b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -74,8 +74,8 @@ import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.RegionServerCallable; import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; +import org.apache.hadoop.hbase.client.SecureBulkLoadClient; import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.coprocessor.SecureBulkLoadClient; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.HalfStoreFileReader; @@ -87,7 +87,6 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileScanner; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; @@ -323,6 +322,8 @@ public class LoadIncrementalHFiles extends Configured implements Tool { // LQI queue does not need to be threadsafe -- all operations on this queue // happen in this thread Deque queue = new LinkedList<>(); + SecureBulkLoadClient secureClient = new SecureBulkLoadClient(table); + try { /* * Checking hfile format is a time-consuming operation, we should have an option to skip @@ -346,13 +347,16 @@ public class LoadIncrementalHFiles extends Configured implements Tool { return; } + if(isSecureBulkLoadEndpointAvailable()) { + LOG.warn("SecureBulkLoadEndpoint is deprecated. 
It will be removed in future releases."); + LOG.warn("Secure bulk load has been integrated into HBase core."); + } + //If using secure bulk load, get source delegation token, and //prepare staging directory and token // fs is the source filesystem fsDelegationToken.acquireDelegationToken(fs); - if(isSecureBulkLoadEndpointAvailable()) { - bulkToken = new SecureBulkLoadClient(table).prepareBulkLoad(table.getName()); - } + bulkToken = secureClient.prepareBulkLoad(admin.getConnection()); // Assumes that region splits can happen while this occurs. while (!queue.isEmpty()) { @@ -391,7 +395,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { } finally { fsDelegationToken.releaseDelegationToken(); if(bulkToken != null) { - new SecureBulkLoadClient(table).cleanupBulkLoad(bulkToken); + secureClient.cleanupBulkLoad(admin.getConnection(), bulkToken); } pool.shutdown(); if (queue != null && !queue.isEmpty()) { @@ -789,21 +793,18 @@ public class LoadIncrementalHFiles extends Configured implements Tool { LOG.debug("Going to connect to server " + getLocation() + " for row " + Bytes.toStringBinary(getRow()) + " with hfile group " + famPaths); byte[] regionName = getLocation().getRegionInfo().getRegionName(); - if (!isSecureBulkLoadEndpointAvailable()) { - success = ProtobufUtil.bulkLoadHFile(getStub(), famPaths, regionName, assignSeqIds); - } else { - try (Table table = conn.getTable(getTableName())) { - secureClient = new SecureBulkLoadClient(table); - success = secureClient.bulkLoadHFiles(famPaths, fsDelegationToken.getUserToken(), - bulkToken, getLocation().getRegionInfo().getStartKey()); - } + try (Table table = conn.getTable(getTableName())) { + secureClient = new SecureBulkLoadClient(table); + success = + secureClient.secureBulkLoadHFiles(getStub(), famPaths, regionName, + assignSeqIds, fsDelegationToken.getUserToken(), bulkToken); } return success; } finally { //Best effort copying of files that might not have been imported //from the staging directory back to original location //in user directory - if(secureClient != null && !success) { + if (secureClient != null && !success) { FileSystem targetFs = FileSystem.get(getConf()); // fs is the source filesystem if(fs == null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 1c1000e..d24bd34 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -497,6 +497,8 @@ public class HRegionServer extends HasThread implements private volatile ThroughputController flushThroughputController; + protected final SecureBulkLoadManager secureBulkLoadManager; + /** * Starts a HRegionServer at the default location. 
* @param conf @@ -617,6 +619,9 @@ public class HRegionServer extends HasThread implements } this.configurationManager = new ConfigurationManager(); + this.secureBulkLoadManager = new SecureBulkLoadManager(this.conf); + this.secureBulkLoadManager.start(); + rpcServices.start(); putUpWebUI(); this.walRoller = new LogRoller(this, this); @@ -3423,4 +3428,9 @@ public class HRegionServer extends HasThread implements public MetricsRegionServer getMetrics() { return metricsRegionServer; } + + @Override + public SecureBulkLoadManager getSecureBulkLoadManager() { + return this.secureBulkLoadManager; + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index c3626fd..38eb66b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -134,6 +134,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest; @@ -147,6 +149,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException; @@ -2036,21 +2040,29 @@ public class RSRpcServices implements HBaseRPCErrorHandler, checkOpen(); requestCount.increment(); Region region = getRegion(request.getRegion()); - List> familyPaths = new ArrayList>(); - for (FamilyPath familyPath: request.getFamilyPathList()) { - familyPaths.add(new Pair(familyPath.getFamily().toByteArray(), - familyPath.getPath())); - } boolean bypass = false; - if (region.getCoprocessorHost() != null) { - bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths); - } boolean loaded = false; - if (!bypass) { - loaded = region.bulkLoadHFiles(familyPaths, request.getAssignSeqNum(), null); - } - if (region.getCoprocessorHost() != null) { - loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded); + + if (!request.hasBulkToken()) { + // Old style bulk load. 
This will not be supported in future releases + List> familyPaths = + new ArrayList>(request.getFamilyPathCount()); + for (FamilyPath familyPath : request.getFamilyPathList()) { + familyPaths.add(new Pair(familyPath.getFamily().toByteArray(), familyPath + .getPath())); + } + if (region.getCoprocessorHost() != null) { + bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths); + } + if (!bypass) { + loaded = region.bulkLoadHFiles(familyPaths, request.getAssignSeqNum(), null); + } + if (region.getCoprocessorHost() != null) { + loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded); + } + } else { + // secure bulk load + loaded = regionServer.secureBulkLoadManager.secureBulkLoadHFiles(region, request); } BulkLoadHFileResponse.Builder builder = BulkLoadHFileResponse.newBuilder(); builder.setLoaded(loaded); @@ -2061,6 +2073,41 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } @Override + public PrepareBulkLoadResponse prepareBulkLoad(RpcController controller, + PrepareBulkLoadRequest request) throws ServiceException { + try { + checkOpen(); + requestCount.increment(); + + Region region = getRegion(request.getRegion()); + + String bulkToken = regionServer.secureBulkLoadManager.prepareBulkLoad(region, request); + PrepareBulkLoadResponse.Builder builder = PrepareBulkLoadResponse.newBuilder(); + builder.setBulkToken(bulkToken); + return builder.build(); + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + @Override + public CleanupBulkLoadResponse cleanupBulkLoad(RpcController controller, + CleanupBulkLoadRequest request) throws ServiceException { + try { + checkOpen(); + requestCount.increment(); + + Region region = getRegion(request.getRegion()); + + regionServer.secureBulkLoadManager.cleanupBulkLoad(region, request); + CleanupBulkLoadResponse response = CleanupBulkLoadResponse.newBuilder().build(); + return response; + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + @Override public CoprocessorServiceResponse execService(final RpcController controller, final CoprocessorServiceRequest request) throws ServiceException { try { @@ -2924,4 +2971,5 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } return UpdateConfigurationResponse.getDefaultInstance(); } + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java index c6689a9..356a88b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java @@ -81,6 +81,11 @@ public interface RegionServerServices extends OnlineRegions, FavoredNodesForRegi RegionServerQuotaManager getRegionServerQuotaManager(); /** + * @return RegionServer's instance of {@link SecureBulkLoadManager} + */ + SecureBulkLoadManager getSecureBulkLoadManager(); + + /** * Context for postOpenDeployTasks(). 
*/ class PostOpenDeployContext { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java new file mode 100644 index 0000000..b47b31d --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java @@ -0,0 +1,419 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.ipc.RpcServer; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.regionserver.Region.BulkLoadListener; +import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.UserProvider; +import org.apache.hadoop.hbase.security.token.FsDelegationToken; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.FSHDFSUtils; +import org.apache.hadoop.hbase.util.Methods; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; + +import java.io.IOException; +import java.math.BigInteger; +import java.security.PrivilegedAction; +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Bulk loads in secure mode. + * + * This service addresses two issues: + *
+ * <ol>
+ * <li>Moving files in a secure filesystem wherein the HBase Client
+ * and HBase Server are different filesystem users.</li>
+ * <li>Does moving in a secure manner. Assuming that the filesystem
+ * is POSIX compliant.</li>
+ * </ol>
+ *
+ * The algorithm is as follows:
+ * <ol>
+ * <li>Create an hbase owned staging directory which is
+ * world traversable (711): {@code /hbase/staging}</li>
+ * <li>A user writes out data to his secure output directory: {@code /user/foo/data}</li>
+ * <li>A call is made to hbase to create a secret staging directory
+ * which is globally rwx (777): {@code /user/staging/averylongandrandomdirectoryname}</li>
+ * <li>The user moves the data into the random staging directory,
+ * then calls bulkLoadHFiles()</li>
+ * </ol>
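+ *
+ * <p>Illustrative sketch only, not part of this patch: a client-side flow against the new
+ * API (see {@code SecureBulkLoadClient} and the {@code Client.proto} changes in this diff)
+ * might look roughly like the following, assuming an existing {@code Connection conn} and
+ * {@code Table table}:
+ * <pre>{@code
+ *   SecureBulkLoadClient client = new SecureBulkLoadClient(table);
+ *   String bulkToken = client.prepareBulkLoad(conn);  // server creates the random staging dir
+ *   try {
+ *     // caller stages its HFiles under the directory named by bulkToken, then issues a
+ *     // BulkLoadHFile RPC carrying the new fs_token and bulk_token fields
+ *   } finally {
+ *     client.cleanupBulkLoad(conn, bulkToken);        // server deletes the staging dir
+ *   }
+ * }</pre>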
+ * + * Like delegation tokens the strength of the security lies in the length + * and randomness of the secret directory. + * + */ +@InterfaceAudience.Private +public class SecureBulkLoadManager { + + public static final long VERSION = 0L; + + //320/5 = 64 characters + private static final int RANDOM_WIDTH = 320; + private static final int RANDOM_RADIX = 32; + + private static final Log LOG = LogFactory.getLog(SecureBulkLoadManager.class); + + private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx"); + private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x"); + + private SecureRandom random; + private FileSystem fs; + private Configuration conf; + + //two levels so it doesn't get deleted accidentally + //no sticky bit in Hadoop 1.0 + private Path baseStagingDir; + + private UserProvider userProvider; + + SecureBulkLoadManager(Configuration conf) { + this.conf = conf; + } + + public void start() { + random = new SecureRandom(); + baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf); + this.userProvider = UserProvider.instantiate(conf); + + try { + fs = FileSystem.get(conf); + fs.mkdirs(baseStagingDir, PERM_HIDDEN); + fs.setPermission(baseStagingDir, PERM_HIDDEN); + FileStatus status = fs.getFileStatus(baseStagingDir); + //no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased + fs.mkdirs(new Path(baseStagingDir,"DONOTERASE"), PERM_HIDDEN); + if (status == null) { + throw new IllegalStateException("Failed to create staging directory " + + baseStagingDir.toString()); + } + if (!status.getPermission().equals(PERM_HIDDEN)) { + throw new IllegalStateException( + "Staging directory already exists but permissions aren't set to '-rwx--x--x' " + + baseStagingDir.toString()); + } + } catch (IOException e) { + LOG.error("Failed to create or set permission on staging directory " + + baseStagingDir.toString(), e); + throw new IllegalStateException("Failed to create or set permission on staging directory " + + baseStagingDir.toString(), e); + } + } + + public void stop() throws IOException { + } + + public String prepareBulkLoad(final Region region, final PrepareBulkLoadRequest request) + throws IOException { + List bulkLoadObservers = getBulkLoadObservers(region); + + if (bulkLoadObservers != null && bulkLoadObservers.size() != 0) { + ObserverContext ctx = + new ObserverContext(); + ctx.prepare((RegionCoprocessorEnvironment) region.getCoprocessorHost() + .findCoprocessorEnvironment(BulkLoadObserver.class).get(0)); + + for (BulkLoadObserver bulkLoadObserver : bulkLoadObservers) { + bulkLoadObserver.prePrepareBulkLoad(ctx, request); + } + } + + String bulkToken = + createStagingDir(baseStagingDir, getActiveUser(), region.getTableDesc().getTableName()) + .toString(); + + return bulkToken; + } + + public void cleanupBulkLoad(final Region region, final CleanupBulkLoadRequest request) + throws IOException { + List bulkLoadObservers = getBulkLoadObservers(region); + + if (bulkLoadObservers != null && bulkLoadObservers.size() != 0) { + ObserverContext ctx = + new ObserverContext(); + ctx.prepare((RegionCoprocessorEnvironment) region.getCoprocessorHost() + .findCoprocessorEnvironment(BulkLoadObserver.class).get(0)); + + for (BulkLoadObserver bulkLoadObserver : bulkLoadObservers) { + bulkLoadObserver.preCleanupBulkLoad(ctx, request); + } + } + + fs.delete(new Path(request.getBulkToken()), true); + } + + public boolean secureBulkLoadHFiles(final Region region, + final BulkLoadHFileRequest request) throws IOException { + final 
List> familyPaths = new ArrayList>(request.getFamilyPathCount()); + for(ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) { + familyPaths.add(new Pair(el.getFamily().toByteArray(), el.getPath())); + } + + Token userToken = null; + if (userProvider.isHadoopSecurityEnabled()) { + userToken = new Token(request.getFsToken().getIdentifier().toByteArray(), request.getFsToken() + .getPassword().toByteArray(), new Text(request.getFsToken().getKind()), new Text( + request.getFsToken().getService())); + } + final String bulkToken = request.getBulkToken(); + User user = getActiveUser(); + final UserGroupInformation ugi = user.getUGI(); + if(userToken != null) { + ugi.addToken(userToken); + } else if (userProvider.isHadoopSecurityEnabled()) { + //we allow this to pass through in "simple" security mode + //for mini cluster testing + throw new DoNotRetryIOException("User token cannot be null"); + } + + boolean bypass = false; + if (region.getCoprocessorHost() != null) { + bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths); + } + boolean loaded = false; + if (!bypass) { + // Get the target fs (HBase region server fs) delegation token + // Since we have checked the permission via 'preBulkLoadHFile', now let's give + // the 'request user' necessary token to operate on the target fs. + // After this point the 'doAs' user will hold two tokens, one for the source fs + // ('request user'), another for the target fs (HBase region server principal). + if (userProvider.isHadoopSecurityEnabled()) { + FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer"); + targetfsDelegationToken.acquireDelegationToken(fs); + + Token targetFsToken = targetfsDelegationToken.getUserToken(); + if (targetFsToken != null + && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) { + ugi.addToken(targetFsToken); + } + } + + loaded = ugi.doAs(new PrivilegedAction() { + @Override + public Boolean run() { + FileSystem fs = null; + try { + fs = FileSystem.get(conf); + for(Pair el: familyPaths) { + Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst())); + if(!fs.exists(stageFamily)) { + fs.mkdirs(stageFamily); + fs.setPermission(stageFamily, PERM_ALL_ACCESS); + } + } + //We call bulkLoadHFiles as requesting user + //To enable access prior to staging + return region.bulkLoadHFiles(familyPaths, true, + new SecureBulkLoadListener(fs, bulkToken, conf)); + } catch (Exception e) { + LOG.error("Failed to complete bulk load", e); + } finally { + if (fs != null) { + try { + if (!UserGroupInformation.getLoginUser().equals(ugi)) { + FileSystem.closeAllForUGI(ugi); + } + } catch (IOException e) { + LOG.error("Failed to close FileSystem for " + ugi.getUserName(), e); + } + } + } + return false; + } + }); + } + if (region.getCoprocessorHost() != null) { + loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded); + } + return loaded; + } + + private List getBulkLoadObservers(Region region) { + List coprocessorList = + region.getCoprocessorHost().findCoprocessors(BulkLoadObserver.class); + + return coprocessorList; + } + + private Path createStagingDir(Path baseDir, + User user, + TableName tableName) throws IOException { + String tblName = tableName.getNameAsString().replace(":", "_"); + String randomDir = user.getShortName()+"__"+ tblName +"__"+ + (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX)); + return createStagingDir(baseDir, user, randomDir); + } + + private Path createStagingDir(Path baseDir, 
+ User user, + String randomDir) throws IOException { + Path p = new Path(baseDir, randomDir); + fs.mkdirs(p, PERM_ALL_ACCESS); + fs.setPermission(p, PERM_ALL_ACCESS); + return p; + } + + private User getActiveUser() throws IOException { + User user = RpcServer.getRequestUser(); + if (user == null) { + // for non-rpc handling, fallback to system user + user = userProvider.getCurrent(); + } + + //this is for testing + if (userProvider.isHadoopSecurityEnabled() + && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) { + return User.createUserForTesting(conf, user.getShortName(), new String[]{}); + } + + return user; + } + + private static class SecureBulkLoadListener implements BulkLoadListener { + // Target filesystem + private final FileSystem fs; + private final String stagingDir; + private final Configuration conf; + // Source filesystem + private FileSystem srcFs = null; + private Map origPermissions = null; + + public SecureBulkLoadListener(FileSystem fs, String stagingDir, Configuration conf) { + this.fs = fs; + this.stagingDir = stagingDir; + this.conf = conf; + this.origPermissions = new HashMap(); + } + + @Override + public String prepareBulkLoad(final byte[] family, final String srcPath) throws IOException { + Path p = new Path(srcPath); + Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName())); + + // In case of Replication for bulk load files, hfiles are already copied in staging directory + if (p.equals(stageP)) { + LOG.debug(p.getName() + + " is already available in staging directory. Skipping copy or rename."); + return stageP.toString(); + } + + if (srcFs == null) { + srcFs = FileSystem.get(p.toUri(), conf); + } + + if(!isFile(p)) { + throw new IOException("Path does not reference a file: " + p); + } + + // Check to see if the source and target filesystems are the same + if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) { + LOG.debug("Bulk-load file " + srcPath + " is on different filesystem than " + + "the destination filesystem. Copying file over to destination staging dir."); + FileUtil.copy(srcFs, p, fs, stageP, false, conf); + } else { + LOG.debug("Moving " + p + " to " + stageP); + FileStatus origFileStatus = fs.getFileStatus(p); + origPermissions.put(srcPath, origFileStatus.getPermission()); + if(!fs.rename(p, stageP)) { + throw new IOException("Failed to move HFile: " + p + " to " + stageP); + } + } + fs.setPermission(stageP, PERM_ALL_ACCESS); + return stageP.toString(); + } + + @Override + public void doneBulkLoad(byte[] family, String srcPath) throws IOException { + LOG.debug("Bulk Load done for: " + srcPath); + } + + @Override + public void failedBulkLoad(final byte[] family, final String srcPath) throws IOException { + if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) { + // files are copied so no need to move them back + return; + } + Path p = new Path(srcPath); + Path stageP = new Path(stagingDir, + new Path(Bytes.toString(family), p.getName())); + + // In case of Replication for bulk load files, hfiles are not renamed by end point during + // prepare stage, so no need of rename here again + if (p.equals(stageP)) { + LOG.debug(p.getName() + " is already available in source directory. 
Skipping rename."); + return; + } + + LOG.debug("Moving " + stageP + " back to " + p); + if(!fs.rename(stageP, p)) + throw new IOException("Failed to move HFile: " + stageP + " to " + p); + + // restore original permission + if (origPermissions.containsKey(srcPath)) { + fs.setPermission(p, origPermissions.get(srcPath)); + } else { + LOG.warn("Can't find previous permission for path=" + srcPath); + } + } + + /** + * Check if the path is referencing a file. + * This is mainly needed to avoid symlinks. + * @param p + * @return true if the p is a file + * @throws IOException + */ + private boolean isFile(Path p) throws IOException { + FileStatus status = srcFs.getFileStatus(p); + boolean isFile = !status.isDirectory(); + try { + isFile = isFile && !(Boolean)Methods.call(FileStatus.class, status, "isSymlink", null, null); + } catch (Exception e) { + } + return isFile; + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index f21d8e2..7d5fc32 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -95,10 +95,10 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.regionserver.Region; @@ -2145,7 +2145,7 @@ public class AccessController extends BaseMasterAndRegionObserver */ @Override public void prePrepareBulkLoad(ObserverContext ctx, - PrepareBulkLoadRequest request) throws IOException { + PrepareBulkLoadRequest request) throws IOException { requireAccess("prePareBulkLoad", ctx.getEnvironment().getRegion().getTableDesc().getTableName(), Action.CREATE); } @@ -2159,7 +2159,7 @@ public class AccessController extends BaseMasterAndRegionObserver */ @Override public void preCleanupBulkLoad(ObserverContext ctx, - CleanupBulkLoadRequest request) throws IOException { + CleanupBulkLoadRequest request) throws IOException { requireAccess("preCleanupBulkLoad", ctx.getEnvironment().getRegion().getTableDesc().getTableName(), Action.CREATE); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java index c1f9251..cb143b7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -18,140 +18,52 @@ package 
org.apache.hadoop.hbase.security.access; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver; +import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.coprocessor.CoprocessorService; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; -import org.apache.hadoop.hbase.ipc.RpcServer; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.ResponseConverter; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; -import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse; -import org.apache.hadoop.hbase.regionserver.Region; -import org.apache.hadoop.hbase.regionserver.Region.BulkLoadListener; -import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.security.UserProvider; -import org.apache.hadoop.hbase.security.token.FsDelegationToken; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.FSHDFSUtils; -import org.apache.hadoop.hbase.util.Methods; -import org.apache.hadoop.hbase.util.Pair; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.token.Token; - -import java.io.IOException; -import java.math.BigInteger; -import java.security.PrivilegedAction; -import java.security.SecureRandom; -import java.util.ArrayList; 
-import java.util.HashMap; -import java.util.List; -import java.util.Map; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService; +import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; /** * Coprocessor service for bulk loads in secure mode. - * This coprocessor has to be installed as part of enabling - * security in HBase. - * - * This service addresses two issues: - *
- * <ol>
- * <li>Moving files in a secure filesystem wherein the HBase Client
- * and HBase Server are different filesystem users.</li>
- * <li>Does moving in a secure manner. Assuming that the filesystem
- * is POSIX compliant.</li>
- * </ol>
- *
- * The algorithm is as follows:
- * <ol>
- * <li>Create an hbase owned staging directory which is
- * world traversable (711): {@code /hbase/staging}</li>
- * <li>A user writes out data to his secure output directory: {@code /user/foo/data}</li>
- * <li>A call is made to hbase to create a secret staging directory
- * which is globally rwx (777): {@code /user/staging/averylongandrandomdirectoryname}</li>
- * <li>The user moves the data into the random staging directory,
- * then calls bulkLoadHFiles()</li>
- * </ol>
- * Like delegation tokens the strength of the security lies in the length - * and randomness of the secret directory. - * + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0 */ @InterfaceAudience.Private +@Deprecated public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements CoprocessorService, Coprocessor { public static final long VERSION = 0L; - //320/5 = 64 characters - private static final int RANDOM_WIDTH = 320; - private static final int RANDOM_RADIX = 32; - private static final Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class); - private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx"); - private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x"); - - private SecureRandom random; - private FileSystem fs; - private Configuration conf; - - //two levels so it doesn't get deleted accidentally - //no sticky bit in Hadoop 1.0 - private Path baseStagingDir; - private RegionCoprocessorEnvironment env; - private UserProvider userProvider; - @Override public void start(CoprocessorEnvironment env) { this.env = (RegionCoprocessorEnvironment)env; - random = new SecureRandom(); - conf = env.getConfiguration(); - baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf); - this.userProvider = UserProvider.instantiate(conf); - - try { - fs = FileSystem.get(conf); - fs.mkdirs(baseStagingDir, PERM_HIDDEN); - fs.setPermission(baseStagingDir, PERM_HIDDEN); - //no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased - fs.mkdirs(new Path(baseStagingDir,"DONOTERASE"), PERM_HIDDEN); - FileStatus status = fs.getFileStatus(baseStagingDir); - if(status == null) { - throw new IllegalStateException("Failed to create staging directory"); - } - if(!status.getPermission().equals(PERM_HIDDEN)) { - throw new IllegalStateException( - "Directory already exists but permissions aren't set to '-rwx--x--x' "); - } - } catch (IOException e) { - throw new IllegalStateException("Failed to get FileSystem instance",e); - } + LOG.warn("SecureBulkLoadEndpoint is deprecated. 
It will be removed in future releases."); + LOG.warn("Secure bulk load has been integrated into HBase core."); } @Override @@ -159,24 +71,12 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService } @Override - public void prepareBulkLoad(RpcController controller, - PrepareBulkLoadRequest request, - RpcCallback done){ + public void prepareBulkLoad(RpcController controller, PrepareBulkLoadRequest request, + RpcCallback done) { try { - List bulkLoadObservers = getBulkLoadObservers(); - - if(bulkLoadObservers != null) { - ObserverContext ctx = - new ObserverContext(); - ctx.prepare(env); - - for(BulkLoadObserver bulkLoadObserver : bulkLoadObservers) { - bulkLoadObserver.prePrepareBulkLoad(ctx, request); - } - } - - String bulkToken = createStagingDir(baseStagingDir, - getActiveUser(), ProtobufUtil.toTableName(request.getTableName())).toString(); + SecureBulkLoadManager secureBulkLoadManager = + this.env.getRegionServerServices().getSecureBulkLoadManager(); + String bulkToken = secureBulkLoadManager.prepareBulkLoad(this.env.getRegion(), request); done.run(PrepareBulkLoadResponse.newBuilder().setBulkToken(bulkToken).build()); } catch (IOException e) { ResponseConverter.setControllerException(controller, e); @@ -185,23 +85,12 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService } @Override - public void cleanupBulkLoad(RpcController controller, - CleanupBulkLoadRequest request, - RpcCallback done) { + public void cleanupBulkLoad(RpcController controller, CleanupBulkLoadRequest request, + RpcCallback done) { try { - List bulkLoadObservers = getBulkLoadObservers(); - - if(bulkLoadObservers != null) { - ObserverContext ctx = - new ObserverContext(); - ctx.prepare(env); - - for(BulkLoadObserver bulkLoadObserver : bulkLoadObservers) { - bulkLoadObserver.preCleanupBulkLoad(ctx, request); - } - } - - fs.delete(new Path(request.getBulkToken()), true); + SecureBulkLoadManager secureBulkLoadManager = + this.env.getRegionServerServices().getSecureBulkLoadManager(); + secureBulkLoadManager.cleanupBulkLoad(this.env.getRegion(), request); done.run(CleanupBulkLoadResponse.newBuilder().build()); } catch (IOException e) { ResponseConverter.setControllerException(controller, e); @@ -210,262 +99,35 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService } @Override - public void secureBulkLoadHFiles(RpcController controller, - SecureBulkLoadHFilesRequest request, - RpcCallback done) { - final List> familyPaths = new ArrayList>(); - for(ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) { - familyPaths.add(new Pair(el.getFamily().toByteArray(),el.getPath())); - } - - Token userToken = null; - if (userProvider.isHadoopSecurityEnabled()) { - userToken = new Token(request.getFsToken().getIdentifier().toByteArray(), request.getFsToken() - .getPassword().toByteArray(), new Text(request.getFsToken().getKind()), new Text( - request.getFsToken().getService())); - } - final String bulkToken = request.getBulkToken(); - User user = getActiveUser(); - final UserGroupInformation ugi = user.getUGI(); - if(userToken != null) { - ugi.addToken(userToken); - } else if (userProvider.isHadoopSecurityEnabled()) { - //we allow this to pass through in "simple" security mode - //for mini cluster testing - ResponseConverter.setControllerException(controller, - new DoNotRetryIOException("User token cannot be null")); - done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build()); - return; - } - - Region region = env.getRegion(); - boolean bypass = false; 
- if (region.getCoprocessorHost() != null) { - try { - bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths); - } catch (IOException e) { - ResponseConverter.setControllerException(controller, e); - done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build()); - return; - } - } + public void secureBulkLoadHFiles(RpcController controller, SecureBulkLoadHFilesRequest request, + RpcCallback done) { boolean loaded = false; - if (!bypass) { - // Get the target fs (HBase region server fs) delegation token - // Since we have checked the permission via 'preBulkLoadHFile', now let's give - // the 'request user' necessary token to operate on the target fs. - // After this point the 'doAs' user will hold two tokens, one for the source fs - // ('request user'), another for the target fs (HBase region server principal). - if (userProvider.isHadoopSecurityEnabled()) { - FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer"); - try { - targetfsDelegationToken.acquireDelegationToken(fs); - } catch (IOException e) { - ResponseConverter.setControllerException(controller, e); - done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build()); - return; - } - Token targetFsToken = targetfsDelegationToken.getUserToken(); - if (targetFsToken != null - && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) { - ugi.addToken(targetFsToken); - } - } - - loaded = ugi.doAs(new PrivilegedAction() { - @Override - public Boolean run() { - FileSystem fs = null; - try { - Configuration conf = env.getConfiguration(); - fs = FileSystem.get(conf); - for(Pair el: familyPaths) { - Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst())); - if(!fs.exists(stageFamily)) { - fs.mkdirs(stageFamily); - fs.setPermission(stageFamily, PERM_ALL_ACCESS); - } - } - //We call bulkLoadHFiles as requesting user - //To enable access prior to staging - return env.getRegion().bulkLoadHFiles(familyPaths, true, - new SecureBulkLoadListener(fs, bulkToken, conf)); - } catch (Exception e) { - LOG.error("Failed to complete bulk load", e); - } finally { - if (fs != null) { - try { - if (!UserGroupInformation.getLoginUser().equals(ugi)) { - FileSystem.closeAllForUGI(ugi); - } - } catch (IOException e) { - LOG.error("Failed to close FileSystem for " + ugi.getUserName(), e); - } - } - } - return false; - } - }); - } - if (region.getCoprocessorHost() != null) { - try { - loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded); - } catch (IOException e) { - ResponseConverter.setControllerException(controller, e); - done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build()); - return; - } + try { + SecureBulkLoadManager secureBulkLoadManager = + this.env.getRegionServerServices().getSecureBulkLoadManager(); + BulkLoadHFileRequest bulkLoadHFileRequest = ConvertSecureBulkLoadHFilesRequest(request); + loaded = secureBulkLoadManager.secureBulkLoadHFiles(this.env.getRegion(), bulkLoadHFileRequest); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); } done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build()); } - private List getBulkLoadObservers() { - List coprocessorList = - this.env.getRegion().getCoprocessorHost().findCoprocessors(BulkLoadObserver.class); - - return coprocessorList; - } - - private Path createStagingDir(Path baseDir, - User user, - TableName tableName) throws IOException { - String tblName = tableName.getNameAsString().replace(":", 
"_"); - String randomDir = user.getShortName()+"__"+ tblName +"__"+ - (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX)); - return createStagingDir(baseDir, user, randomDir); - } - - private Path createStagingDir(Path baseDir, - User user, - String randomDir) throws IOException { - Path p = new Path(baseDir, randomDir); - fs.mkdirs(p, PERM_ALL_ACCESS); - fs.setPermission(p, PERM_ALL_ACCESS); - return p; - } - - private User getActiveUser() { - User user = RpcServer.getRequestUser(); - if (user == null) { - return null; - } - - //this is for testing - if (userProvider.isHadoopSecurityEnabled() - && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) { - return User.createUserForTesting(conf, user.getShortName(), new String[]{}); - } - - return user; + private BulkLoadHFileRequest ConvertSecureBulkLoadHFilesRequest( + SecureBulkLoadHFilesRequest request) { + BulkLoadHFileRequest.Builder bulkLoadHFileRequest = BulkLoadHFileRequest.newBuilder(); + RegionSpecifier region = + RequestConverter.buildRegionSpecifier(RegionSpecifierType.REGION_NAME, this.env + .getRegionInfo().getRegionName()); + + bulkLoadHFileRequest.setRegion(region).setFsToken(request.getFsToken()) + .setBulkToken(request.getBulkToken()).setAssignSeqNum(request.getAssignSeqNum()) + .addAllFamilyPath(request.getFamilyPathList()); + return bulkLoadHFileRequest.build(); } @Override public Service getService() { return this; } - - private static class SecureBulkLoadListener implements BulkLoadListener { - // Target filesystem - private FileSystem fs; - private String stagingDir; - private Configuration conf; - // Source filesystem - private FileSystem srcFs = null; - private Map origPermissions = null; - - public SecureBulkLoadListener(FileSystem fs, String stagingDir, Configuration conf) { - this.fs = fs; - this.stagingDir = stagingDir; - this.conf = conf; - this.origPermissions = new HashMap(); - } - - @Override - public String prepareBulkLoad(final byte[] family, final String srcPath) throws IOException { - Path p = new Path(srcPath); - Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName())); - - // In case of Replication for bulk load files, hfiles are already copied in staging directory - if (p.equals(stageP)) { - LOG.debug(p.getName() - + " is already available in staging directory. Skipping copy or rename."); - return stageP.toString(); - } - - if (srcFs == null) { - srcFs = FileSystem.get(p.toUri(), conf); - } - - if(!isFile(p)) { - throw new IOException("Path does not reference a file: " + p); - } - - // Check to see if the source and target filesystems are the same - if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) { - LOG.debug("Bulk-load file " + srcPath + " is on different filesystem than " + - "the destination filesystem. 
Copying file over to destination staging dir."); - FileUtil.copy(srcFs, p, fs, stageP, false, conf); - } else { - LOG.debug("Moving " + p + " to " + stageP); - FileStatus origFileStatus = fs.getFileStatus(p); - origPermissions.put(srcPath, origFileStatus.getPermission()); - if(!fs.rename(p, stageP)) { - throw new IOException("Failed to move HFile: " + p + " to " + stageP); - } - } - fs.setPermission(stageP, PERM_ALL_ACCESS); - return stageP.toString(); - } - - @Override - public void doneBulkLoad(byte[] family, String srcPath) throws IOException { - LOG.debug("Bulk Load done for: " + srcPath); - } - - @Override - public void failedBulkLoad(final byte[] family, final String srcPath) throws IOException { - if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) { - // files are copied so no need to move them back - return; - } - Path p = new Path(srcPath); - Path stageP = new Path(stagingDir, - new Path(Bytes.toString(family), p.getName())); - - // In case of Replication for bulk load files, hfiles are not renamed by end point during - // prepare stage, so no need of rename here again - if (p.equals(stageP)) { - LOG.debug(p.getName() + " is already available in source directory. Skipping rename."); - return; - } - - LOG.debug("Moving " + stageP + " back to " + p); - if(!fs.rename(stageP, p)) - throw new IOException("Failed to move HFile: " + stageP + " to " + p); - - // restore original permission - if (origPermissions.containsKey(srcPath)) { - fs.setPermission(p, origPermissions.get(srcPath)); - } else { - LOG.warn("Can't find previous permission for path=" + srcPath); - } - } - - /** - * Check if the path is referencing a file. - * This is mainly needed to avoid symlinks. - * @param p - * @return true if the p is a file - * @throws IOException - */ - private boolean isFile(Path p) throws IOException { - FileStatus status = srcFs.getFileStatus(p); - boolean isFile = !status.isDirectory(); - try { - isFile = isFile && !(Boolean)Methods.call(FileStatus.class, status, "isSymlink", null, null); - } catch (Exception e) { - } - return isFile; - } - } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 6cd1963..6f225d6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.regionserver.MetricsRegionServer; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionServerAccounting; import org.apache.hadoop.hbase.regionserver.RegionServerServices; +import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager; import org.apache.hadoop.hbase.regionserver.ServerNonceManager; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.util.Bytes; @@ -316,7 +317,6 @@ public class MockRegionServerServices implements RegionServerServices { @Override public ClusterConnection getClusterConnection() { - // TODO Auto-generated method stub return null; } @@ -334,4 +334,9 @@ public class MockRegionServerServices implements RegionServerServices { public MetricsRegionServer getMetrics() { return null; } + + @Override + public SecureBulkLoadManager getSecureBulkLoadManager() { + return null; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 48d7efc..354f0a8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -35,8 +35,6 @@ import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; -import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore; import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; @@ -336,16 +334,21 @@ public class TestReplicaWithCluster { LOG.debug("Loading test data"); @SuppressWarnings("deprecation") final ClusterConnection conn = (ClusterConnection) HTU.getAdmin().getConnection(); + table = conn.getTable(hdt.getTableName()); + final String bulkToken = new SecureBulkLoadClient(table).prepareBulkLoad(conn); RegionServerCallable callable = new RegionServerCallable( conn, hdt.getTableName(), TestHRegionServerBulkLoad.rowkey(0)) { @Override public Void call(int timeout) throws Exception { LOG.debug("Going to connect to server " + getLocation() + " for row " + Bytes.toStringBinary(getRow())); + SecureBulkLoadClient secureClient = null; byte[] regionName = getLocation().getRegionInfo().getRegionName(); - BulkLoadHFileRequest request = - RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName, true); - getStub().bulkLoadHFile(null, request); + try (Table table = conn.getTable(getTableName())) { + secureClient = new SecureBulkLoadClient(table); + secureClient.secureBulkLoadHFiles(getStub(), famPaths, regionName, + true, null, bulkToken); + } return null; } }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesUseSecurityEndPoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesUseSecurityEndPoint.java deleted file mode 100644 index 11627a1..0000000 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesUseSecurityEndPoint.java +++ /dev/null @@ -1,45 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hbase.mapreduce; - -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; -import org.apache.hadoop.hbase.testclassification.LargeTests; -import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; - -@Category(LargeTests.class) -public class TestLoadIncrementalHFilesUseSecurityEndPoint extends TestLoadIncrementalHFiles { - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - util.getConfiguration().setInt(LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY, - MAX_FILES_PER_REGION_PER_FAMILY); - util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, - "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint"); - // change default behavior so that tag values are returned with normal rpcs - util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY, - KeyValueCodecWithTags.class.getCanonicalName()); - - util.startMiniCluster(); - setupNamespace(); - } -} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 69f2e35..2927023 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -83,6 +83,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodes import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; @@ -90,6 +92,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; @@ -103,6 +107,7 @@ import org.apache.hadoop.hbase.regionserver.MetricsRegionServer; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionServerAccounting; import org.apache.hadoop.hbase.regionserver.RegionServerServices; +import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager; import org.apache.hadoop.hbase.regionserver.ServerNonceManager; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.util.Bytes; @@ -661,7 +666,6 @@ 
ClientProtos.ClientService.BlockingInterface, RegionServerServices { @Override public ClusterConnection getClusterConnection() { - // TODO Auto-generated method stub return null; } @@ -679,4 +683,21 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices { public MetricsRegionServer getMetrics() { return null; } -} \ No newline at end of file + + @Override + public PrepareBulkLoadResponse prepareBulkLoad(RpcController controller, + PrepareBulkLoadRequest request) throws ServiceException { + return null; + } + + @Override + public CleanupBulkLoadResponse cleanupBulkLoad(RpcController controller, + CleanupBulkLoadRequest request) throws ServiceException { + return null; + } + + @Override + public SecureBulkLoadManager getSecureBulkLoadManager() { + return null; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java new file mode 100644 index 0000000..9ecc5d6 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos; +import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; +import org.apache.hadoop.hbase.util.ByteStringer; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.security.token.Token; + +/** + * Client proxy for SecureBulkLoadProtocol used in conjunction with SecureBulkLoadEndpoint + * @deprecated Use for backward compatibility testing only. 
Will be removed when + * SecureBulkLoadEndpoint is not supported. + */ +@InterfaceAudience.Private +public class SecureBulkLoadEndpointClient { + private Table table; + + public SecureBulkLoadEndpointClient(Table table) { + this.table = table; + } + + public String prepareBulkLoad(final TableName tableName) throws IOException { + try { + CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW); + SecureBulkLoadProtos.SecureBulkLoadService instance = + ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); + + ServerRpcController controller = new ServerRpcController(); + + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + + PrepareBulkLoadRequest request = + PrepareBulkLoadRequest.newBuilder() + .setTableName(ProtobufUtil.toProtoTableName(tableName)).build(); + + instance.prepareBulkLoad(controller, + request, + rpcCallback); + + PrepareBulkLoadResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + + return response.getBulkToken(); + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public void cleanupBulkLoad(final String bulkToken) throws IOException { + try { + CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW); + SecureBulkLoadProtos.SecureBulkLoadService instance = + ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); + + ServerRpcController controller = new ServerRpcController(); + + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + + CleanupBulkLoadRequest request = + CleanupBulkLoadRequest.newBuilder() + .setBulkToken(bulkToken).build(); + + instance.cleanupBulkLoad(controller, + request, + rpcCallback); + + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public boolean bulkLoadHFiles(final List> familyPaths, + final Token userToken, + final String bulkToken, + final byte[] startRow) throws IOException { + // we never want to send a batch of HFiles to all regions, thus cannot call + // HTable#coprocessorService methods that take start and end rowkeys; see HBASE-9639 + try { + CoprocessorRpcChannel channel = table.coprocessorService(startRow); + SecureBulkLoadProtos.SecureBulkLoadService instance = + ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel); + + DelegationToken protoDT = + DelegationToken.newBuilder().build(); + if(userToken != null) { + protoDT = + DelegationToken.newBuilder() + .setIdentifier(ByteStringer.wrap(userToken.getIdentifier())) + .setPassword(ByteStringer.wrap(userToken.getPassword())) + .setKind(userToken.getKind().toString()) + .setService(userToken.getService().toString()).build(); + } + + List protoFamilyPaths = + new ArrayList(); + for(Pair el: familyPaths) { + protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + .setFamily(ByteStringer.wrap(el.getFirst())) + .setPath(el.getSecond()).build()); + } + + SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request = + SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder() + .setFsToken(protoDT) + .addAllFamilyPath(protoFamilyPaths) + .setBulkToken(bulkToken).build(); + + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + instance.secureBulkLoadHFiles(controller, + request, + rpcCallback); + + 
SecureBulkLoadProtos.SecureBulkLoadHFilesResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + return response.getLoaded(); + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public Path getStagingPath(String bulkToken, byte[] family) throws IOException { + return SecureBulkLoadUtil.getStagingPath(table.getConfiguration(), bulkToken, family); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index bd5c91e..6e68201 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.NavigableMap; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; @@ -52,6 +51,7 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.RpcRetryingCaller; import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.SecureBulkLoadClient; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; import org.apache.hadoop.hbase.coprocessor.ObserverContext; @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder; import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.regionserver.wal.TestWALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -91,15 +90,15 @@ import com.google.common.collect.Lists; @Category({RegionServerTests.class, LargeTests.class}) public class TestHRegionServerBulkLoad { private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class); - private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private final static Configuration conf = UTIL.getConfiguration(); - private final static byte[] QUAL = Bytes.toBytes("qual"); - private final static int NUM_CFS = 10; + protected static HBaseTestingUtility UTIL = new HBaseTestingUtility(); + protected final static Configuration conf = UTIL.getConfiguration(); + protected final static byte[] QUAL = Bytes.toBytes("qual"); + protected final static int NUM_CFS = 10; private int sleepDuration; public static int BLOCKSIZE = 64 * 1024; public static Algorithm COMPRESSION = Compression.Algorithm.NONE; - private final static byte[][] families = new byte[NUM_CFS][]; + protected final static byte[][] families = new byte[NUM_CFS][]; static { for (int i = 0; i < NUM_CFS; i++) { families[i] = Bytes.toBytes(family(i)); @@ -200,16 +199,21 @@ public class TestHRegionServerBulkLoad { // bulk load HFiles final ClusterConnection conn = (ClusterConnection) UTIL.getAdmin().getConnection(); + Table table = conn.getTable(tableName); + final String bulkToken = new SecureBulkLoadClient(table).prepareBulkLoad(conn); RegionServerCallable callable = new 
RegionServerCallable(conn, tableName, Bytes.toBytes("aaa")) { @Override public Void call(int callTimeout) throws Exception { LOG.debug("Going to connect to server " + getLocation() + " for row " + Bytes.toStringBinary(getRow())); + SecureBulkLoadClient secureClient = null; byte[] regionName = getLocation().getRegionInfo().getRegionName(); - BulkLoadHFileRequest request = - RequestConverter.buildBulkLoadHFileRequest(famPaths, regionName, true); - getStub().bulkLoadHFile(null, request); + try (Table table = conn.getTable(getTableName())) { + secureClient = new SecureBulkLoadClient(table); + secureClient.secureBulkLoadHFiles(getStub(), famPaths, regionName, + true, null, bulkToken); + } return null; } }; @@ -320,7 +324,7 @@ public class TestHRegionServerBulkLoad { * Creates a table with given table name and specified number of column * families if the table does not already exist. */ - private void setupTable(TableName table, int cfs) throws IOException { + public void setupTable(TableName table, int cfs) throws IOException { try { LOG.info("Creating table " + table); HTableDescriptor htd = new HTableDescriptor(table); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java new file mode 100644 index 0000000..d55adef --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java @@ -0,0 +1,162 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.regionserver; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread; +import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.ClusterConnection; +import org.apache.hadoop.hbase.client.RegionServerCallable; +import org.apache.hadoop.hbase.client.RpcRetryingCaller; +import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.testclassification.LargeTests; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import com.google.common.collect.Lists; + +/** + * Tests bulk loading of HFiles with old non-secure client for backward compatibility. Will be + * removed when old non-secure client for backward compatibility is not supported. + */ +@RunWith(Parameterized.class) +@Category({RegionServerTests.class, LargeTests.class}) +public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBulkLoad { + public TestHRegionServerBulkLoadWithOldClient(int duration) { + super(duration); + } + + private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoadWithOldClient.class); + + public static class AtomicHFileLoader extends RepeatingTestThread { + final AtomicLong numBulkLoads = new AtomicLong(); + final AtomicLong numCompactions = new AtomicLong(); + private TableName tableName; + + public AtomicHFileLoader(TableName tableName, TestContext ctx, + byte targetFamilies[][]) throws IOException { + super(ctx); + this.tableName = tableName; + } + + public void doAnAction() throws Exception { + long iteration = numBulkLoads.getAndIncrement(); + Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d", + iteration)); + + // create HFiles for different column families + FileSystem fs = UTIL.getTestFileSystem(); + byte[] val = Bytes.toBytes(String.format("%010d", iteration)); + final List> famPaths = new ArrayList>( + NUM_CFS); + for (int i = 0; i < NUM_CFS; i++) { + Path hfile = new Path(dir, family(i)); + byte[] fam = Bytes.toBytes(family(i)); + createHFile(fs, hfile, fam, QUAL, val, 1000); + famPaths.add(new Pair<>(fam, hfile.toString())); + } + + // bulk load HFiles + final ClusterConnection conn = (ClusterConnection) UTIL.getAdmin().getConnection(); + RegionServerCallable callable = + new RegionServerCallable(conn, tableName, Bytes.toBytes("aaa")) { + @Override + public Void call(int callTimeout) throws Exception { + LOG.info("Non-secure old client"); + byte[] regionName = getLocation().getRegionInfo().getRegionName(); + BulkLoadHFileRequest request = + RequestConverter + .buildBulkLoadHFileRequest(famPaths, regionName, true, null, null); + getStub().bulkLoadHFile(null, 
request); + return null; + } + }; + RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(conf); + RpcRetryingCaller caller = factory. newCaller(); + caller.callWithRetries(callable, Integer.MAX_VALUE); + + // Periodically do compaction to reduce the number of open file handles. + if (numBulkLoads.get() % 5 == 0) { + // 5 * 50 = 250 open file handles! + callable = new RegionServerCallable(conn, tableName, Bytes.toBytes("aaa")) { + @Override + public Void call(int callTimeout) throws Exception { + LOG.debug("compacting " + getLocation() + " for row " + + Bytes.toStringBinary(getRow())); + AdminProtos.AdminService.BlockingInterface server = + conn.getAdmin(getLocation().getServerName()); + CompactRegionRequest request = + RequestConverter.buildCompactRegionRequest( + getLocation().getRegionInfo().getRegionName(), true, null); + server.compactRegion(null, request); + numCompactions.incrementAndGet(); + return null; + } + }; + caller.callWithRetries(callable, Integer.MAX_VALUE); + } + } + } + + void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners) + throws Exception { + setupTable(tableName, 10); + + TestContext ctx = new TestContext(UTIL.getConfiguration()); + + AtomicHFileLoader loader = new AtomicHFileLoader(tableName, ctx, null); + ctx.addThread(loader); + + List scanners = Lists.newArrayList(); + for (int i = 0; i < numScanners; i++) { + AtomicScanReader scanner = new AtomicScanReader(tableName, ctx, families); + scanners.add(scanner); + ctx.addThread(scanner); + } + + ctx.startThreads(); + ctx.waitFor(millisToRun); + ctx.stop(); + + LOG.info("Loaders:"); + LOG.info(" loaded " + loader.numBulkLoads.get()); + LOG.info(" compations " + loader.numCompactions.get()); + + LOG.info("Scanners:"); + for (AtomicScanReader scanner : scanners) { + LOG.info(" scanned " + scanner.numScans.get()); + LOG.info(" verified " + scanner.numRowsScanned.get() + " rows"); + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java new file mode 100644 index 0000000..6de6261 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java @@ -0,0 +1,177 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.regionserver; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread; +import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.ClusterConnection; +import org.apache.hadoop.hbase.client.RegionServerCallable; +import org.apache.hadoop.hbase.client.RpcRetryingCaller; +import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; +import org.apache.hadoop.hbase.client.SecureBulkLoadClient; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.apache.hadoop.hbase.testclassification.LargeTests; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.junit.BeforeClass; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import com.google.common.collect.Lists; + +/** + * Tests bulk loading of HFiles with old secure Endpoint client for backward compatibility. Will be + * removed when old non-secure client for backward compatibility is not supported. 
+ */ +@RunWith(Parameterized.class) +@Category({RegionServerTests.class, LargeTests.class}) +public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionServerBulkLoad { + public TestHRegionServerBulkLoadWithOldSecureEndpoint(int duration) { + super(duration); + } + + private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoadWithOldSecureEndpoint.class); + + @BeforeClass + public static void setUpBeforeClass() throws IOException { + conf.setInt("hbase.rpc.timeout", 10 * 1000); + conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, + "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint"); + } + + public static class AtomicHFileLoader extends RepeatingTestThread { + final AtomicLong numBulkLoads = new AtomicLong(); + final AtomicLong numCompactions = new AtomicLong(); + private TableName tableName; + + public AtomicHFileLoader(TableName tableName, TestContext ctx, + byte targetFamilies[][]) throws IOException { + super(ctx); + this.tableName = tableName; + } + + public void doAnAction() throws Exception { + long iteration = numBulkLoads.getAndIncrement(); + Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d", + iteration)); + + // create HFiles for different column families + FileSystem fs = UTIL.getTestFileSystem(); + byte[] val = Bytes.toBytes(String.format("%010d", iteration)); + final List> famPaths = new ArrayList>( + NUM_CFS); + for (int i = 0; i < NUM_CFS; i++) { + Path hfile = new Path(dir, family(i)); + byte[] fam = Bytes.toBytes(family(i)); + createHFile(fs, hfile, fam, QUAL, val, 1000); + famPaths.add(new Pair<>(fam, hfile.toString())); + } + + // bulk load HFiles + final ClusterConnection conn = (ClusterConnection) UTIL.getAdmin().getConnection(); + Table table = conn.getTable(tableName); + final String bulkToken = new SecureBulkLoadEndpointClient(table).prepareBulkLoad(tableName); + RegionServerCallable callable = + new RegionServerCallable(conn, tableName, Bytes.toBytes("aaa")) { + @Override + public Void call(int callTimeout) throws Exception { + LOG.debug("Going to connect to server " + getLocation() + " for row " + + Bytes.toStringBinary(getRow())); + try (Table table = conn.getTable(getTableName())) { + boolean loaded = + new SecureBulkLoadEndpointClient(table).bulkLoadHFiles(famPaths, null, + bulkToken, getLocation().getRegionInfo().getStartKey()); + } + return null; + } + }; + RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(conf); + RpcRetryingCaller caller = factory. newCaller(); + caller.callWithRetries(callable, Integer.MAX_VALUE); + + // Periodically do compaction to reduce the number of open file handles. + if (numBulkLoads.get() % 5 == 0) { + // 5 * 50 = 250 open file handles! 
+ callable = new RegionServerCallable(conn, tableName, Bytes.toBytes("aaa")) { + @Override + public Void call(int callTimeout) throws Exception { + LOG.debug("compacting " + getLocation() + " for row " + + Bytes.toStringBinary(getRow())); + AdminProtos.AdminService.BlockingInterface server = + conn.getAdmin(getLocation().getServerName()); + CompactRegionRequest request = + RequestConverter.buildCompactRegionRequest( + getLocation().getRegionInfo().getRegionName(), true, null); + server.compactRegion(null, request); + numCompactions.incrementAndGet(); + return null; + } + }; + caller.callWithRetries(callable, Integer.MAX_VALUE); + } + } + } + + void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners) + throws Exception { + setupTable(tableName, 10); + + TestContext ctx = new TestContext(UTIL.getConfiguration()); + + AtomicHFileLoader loader = new AtomicHFileLoader(tableName, ctx, null); + ctx.addThread(loader); + + List scanners = Lists.newArrayList(); + for (int i = 0; i < numScanners; i++) { + AtomicScanReader scanner = new AtomicScanReader(tableName, ctx, families); + scanners.add(scanner); + ctx.addThread(scanner); + } + + ctx.startThreads(); + ctx.waitFor(millisToRun); + ctx.stop(); + + LOG.info("Loaders:"); + LOG.info(" loaded " + loader.numBulkLoads.get()); + LOG.info(" compations " + loader.numCompactions.get()); + + LOG.info("Scanners:"); + for (AtomicScanReader scanner : scanners) { + LOG.info(" scanned " + scanner.numScans.get()); + LOG.info(" verified " + scanner.numRowsScanned.get() + " rows"); + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java index edad059..0e60877 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java @@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -32,6 +31,7 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoordinatedStateManagerFactory; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.CoordinatedStateManager; @@ -61,6 +61,8 @@ public class TestPriorityRpc { public void setup() { Configuration conf = HBaseConfiguration.create(); conf.setBoolean("hbase.testing.nocluster", true); // No need to do ZK + final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(conf); + TEST_UTIL.getDataTestDir(this.getClass().getName()); CoordinatedStateManager cp = CoordinatedStateManagerFactory.getCoordinatedStateManager(conf); regionServer = HRegionServer.constructRegionServer(HRegionServer.class, conf, cp); priority = regionServer.rpcServices.getPriority(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index f0e7ac9..274fe37 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -99,8 +99,7 @@ public class SecureTestUtil { conf.set("hadoop.security.authentication", "simple"); conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName() + "," + MasterSyncObserver.class.getName()); - conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName() + - "," + SecureBulkLoadEndpoint.class.getName()); + conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName()); conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName()); // Need HFile V3 for tags for security features conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
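For reference, the end-to-end client flow that replaces the old SecureBulkLoadEndpoint coprocessor call can be sketched as follows. This is a minimal illustration assembled from the SecureBulkLoadClient introduced by this patch and the updated TestHRegionServerBulkLoad / TestReplicaWithCluster code above; the table name, column family, HFile path, and start row are hypothetical placeholders, the cast to the internal ClusterConnection interface mirrors what the tests do, and a real caller (such as LoadIncrementalHFiles) would add error handling and build the family/HFile list from actual staged files. In a secure deployment a filesystem delegation token would be passed where the sketch passes null.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionServerCallable;
import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
import org.apache.hadoop.hbase.client.SecureBulkLoadClient;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class SecureBulkLoadFlowSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    final TableName tableName = TableName.valueOf("myTable");                 // placeholder table
    final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>();
    famPaths.add(new Pair<byte[], String>(Bytes.toBytes("cf"), "/staging/cf/hfile")); // placeholder

    // The tests above cast to the internal ClusterConnection interface; the same is done here.
    final ClusterConnection conn = (ClusterConnection) ConnectionFactory.createConnection(conf);
    try (Table table = conn.getTable(tableName)) {
      // 1. Ask the hosting region server for a bulk token; it creates the staging directory.
      final String bulkToken = new SecureBulkLoadClient(table).prepareBulkLoad(conn);

      // 2. Load the HFiles against the region holding the start row, with retries.
      RegionServerCallable<Void> callable =
          new RegionServerCallable<Void>(conn, tableName, Bytes.toBytes("row")) {
            @Override
            public Void call(int callTimeout) throws Exception {
              byte[] regionName = getLocation().getRegionInfo().getRegionName();
              try (Table t = conn.getTable(getTableName())) {
                // null userToken is only acceptable with simple authentication.
                new SecureBulkLoadClient(t).secureBulkLoadHFiles(
                    getStub(), famPaths, regionName, true, null, bulkToken);
              }
              return null;
            }
          };
      RpcRetryingCallerFactory.instantiate(conn.getConfiguration(), null)
          .<Void> newCaller().callWithRetries(callable, Integer.MAX_VALUE);

      // 3. Drop the staging directory once the load has finished.
      new SecureBulkLoadClient(table).cleanupBulkLoad(conn, bulkToken);
    } finally {
      conn.close();
    }
  }
}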