Index: hbase-protocol/src/main/protobuf/HBase.proto =================================================================== --- hbase-protocol/src/main/protobuf/HBase.proto (revision 0) +++ hbase-protocol/src/main/protobuf/HBase.proto (revision 1522038) @@ -0,0 +1,187 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are shared throughout HBase + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "HBaseProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "Cell.proto"; + +/** + * Table Name + */ +message TableName { + required bytes namespace = 1; + required bytes qualifier = 2; +} + +/** + * Table Schema + * Inspired by the rest TableSchema + */ +message TableSchema { + optional TableName table_name = 1; + repeated BytesBytesPair attributes = 2; + repeated ColumnFamilySchema column_families = 3; + repeated NameStringPair configuration = 4; +} + +/** + * Column Family Schema + * Inspired by the rest ColumSchemaMessage + */ +message ColumnFamilySchema { + required bytes name = 1; + repeated BytesBytesPair attributes = 2; + repeated NameStringPair configuration = 3; +} + +/** + * Protocol buffer version of HRegionInfo. + */ +message RegionInfo { + required uint64 region_id = 1; + required TableName table_name = 2; + optional bytes start_key = 3; + optional bytes end_key = 4; + optional bool offline = 5; + optional bool split = 6; +} + +/** + * Protocol buffer for favored nodes + */ +message FavoredNodes { + repeated ServerName favored_node = 1; +} + +/** + * Container protocol buffer to specify a region. + * You can specify region by region name, or the hash + * of the region name, which is known as encoded + * region name. + */ +message RegionSpecifier { + required RegionSpecifierType type = 1; + required bytes value = 2; + + enum RegionSpecifierType { + // ,,. + REGION_NAME = 1; + + // hash of ,, + ENCODED_REGION_NAME = 2; + } +} + +/** + * A range of time. Both from and to are Java time + * stamp in milliseconds. If you don't specify a time + * range, it means all time. 
By default, if not + * specified, from = 0, and to = Long.MAX_VALUE + */ +message TimeRange { + optional uint64 from = 1; + optional uint64 to = 2; +} + +/* Comparison operators */ +enum CompareType { + LESS = 0; + LESS_OR_EQUAL = 1; + EQUAL = 2; + NOT_EQUAL = 3; + GREATER_OR_EQUAL = 4; + GREATER = 5; + NO_OP = 6; +} + +/** + * Protocol buffer version of ServerName + */ +message ServerName { + required string host_name = 1; + optional uint32 port = 2; + optional uint64 start_code = 3; +} + +// Comment data structures + +message Coprocessor { + required string name = 1; +} + +message NameStringPair { + required string name = 1; + required string value = 2; +} + +message NameBytesPair { + required string name = 1; + optional bytes value = 2; +} + +message BytesBytesPair { + required bytes first = 1; + required bytes second = 2; +} + +message NameInt64Pair { + optional string name = 1; + optional int64 value = 2; +} + +/** + * Description of the snapshot to take + */ +message SnapshotDescription { + required string name = 1; + optional string table = 2; // not needed for delete, but checked for in taking snapshot + optional int64 creation_time = 3 [default = 0]; + enum Type { + DISABLED = 0; + FLUSH = 1; + } + optional Type type = 4 [default = FLUSH]; + optional int32 version = 5; +} + +message EmptyMsg { +} + +message LongMsg { + required int64 long_msg = 1; + +} + +message BigDecimalMsg { + required bytes bigdecimal_msg = 1; +} + +message UUID { + required uint64 least_sig_bits = 1; + required uint64 most_sig_bits = 2; +} + +message NamespaceDescriptor { + required bytes name = 1; + repeated NameStringPair configuration = 2; +} Index: hbase-protocol/src/main/protobuf/RPC.proto =================================================================== --- hbase-protocol/src/main/protobuf/RPC.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/RPC.proto (working copy) @@ -16,7 +16,7 @@ * limitations under the License. 
*/ import "Tracing.proto"; -import "hbase.proto"; +import "HBase.proto"; option java_package = "org.apache.hadoop.hbase.protobuf.generated"; option java_outer_classname = "RPCProtos"; Index: hbase-protocol/src/main/protobuf/ClusterStatus.proto =================================================================== --- hbase-protocol/src/main/protobuf/ClusterStatus.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/ClusterStatus.proto (working copy) @@ -23,7 +23,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "ClusterId.proto"; import "FS.proto"; Index: hbase-protocol/src/main/protobuf/Filter.proto =================================================================== --- hbase-protocol/src/main/protobuf/Filter.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Filter.proto (working copy) @@ -24,7 +24,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "Comparator.proto"; message Filter { Index: hbase-protocol/src/main/protobuf/Aggregate.proto =================================================================== --- hbase-protocol/src/main/protobuf/Aggregate.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Aggregate.proto (working copy) @@ -24,8 +24,8 @@ import "Client.proto"; -message AggregateArgument { - /** The argument passed to the AggregateService consists of three parts +message AggregateRequest { + /** The request passed to the AggregateService consists of three parts * (1) the (canonical) classname of the ColumnInterpreter implementation * (2) the Scan query * (3) any bytes required to construct the ColumnInterpreter object @@ -52,11 +52,11 @@ * AggregateService method implementations and their functionality. */ service AggregateService { - rpc GetMax (AggregateArgument) returns (AggregateResponse); - rpc GetMin (AggregateArgument) returns (AggregateResponse); - rpc GetSum (AggregateArgument) returns (AggregateResponse); - rpc GetRowNum (AggregateArgument) returns (AggregateResponse); - rpc GetAvg (AggregateArgument) returns (AggregateResponse); - rpc GetStd (AggregateArgument) returns (AggregateResponse); - rpc GetMedian (AggregateArgument) returns (AggregateResponse); + rpc GetMax (AggregateRequest) returns (AggregateResponse); + rpc GetMin (AggregateRequest) returns (AggregateResponse); + rpc GetSum (AggregateRequest) returns (AggregateResponse); + rpc GetRowNum (AggregateRequest) returns (AggregateResponse); + rpc GetAvg (AggregateRequest) returns (AggregateResponse); + rpc GetStd (AggregateRequest) returns (AggregateResponse); + rpc GetMedian (AggregateRequest) returns (AggregateResponse); } Index: hbase-protocol/src/main/protobuf/Tracing.proto =================================================================== --- hbase-protocol/src/main/protobuf/Tracing.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Tracing.proto (working copy) @@ -16,7 +16,7 @@ * limitations under the License. 
*/ option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "Tracing"; +option java_outer_classname = "TracingProtos"; option java_generate_equals_and_hash = true; option optimize_for = SPEED; Index: hbase-protocol/src/main/protobuf/HFile.proto =================================================================== --- hbase-protocol/src/main/protobuf/HFile.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/HFile.proto (working copy) @@ -21,7 +21,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; // Map of name/values message FileInfoProto { Index: hbase-protocol/src/main/protobuf/Admin.proto =================================================================== --- hbase-protocol/src/main/protobuf/Admin.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Admin.proto (working copy) @@ -25,7 +25,7 @@ option optimize_for = SPEED; import "Client.proto"; -import "hbase.proto"; +import "HBase.proto"; import "WAL.proto"; message GetRegionInfoRequest { Index: hbase-protocol/src/main/protobuf/AccessControl.proto =================================================================== --- hbase-protocol/src/main/protobuf/AccessControl.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/AccessControl.proto (working copy) @@ -22,7 +22,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; message Permission { enum Action { @@ -90,13 +90,13 @@ message RevokeResponse { } -message UserPermissionsRequest { +message GetUserPermissionsRequest { optional Permission.Type type = 1; optional TableName table_name = 2; optional bytes namespace_name = 3; } -message UserPermissionsResponse { +message GetUserPermissionsResponse { repeated UserPermission user_permission = 1; } @@ -114,8 +114,8 @@ rpc Revoke(RevokeRequest) returns (RevokeResponse); - rpc GetUserPermissions(UserPermissionsRequest) - returns (UserPermissionsResponse); + rpc GetUserPermissions(GetUserPermissionsRequest) + returns (GetUserPermissionsResponse); rpc CheckPermissions(CheckPermissionsRequest) returns (CheckPermissionsResponse); Index: hbase-protocol/src/main/protobuf/MasterMonitor.proto =================================================================== --- hbase-protocol/src/main/protobuf/MasterMonitor.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/MasterMonitor.proto (working copy) @@ -25,7 +25,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "ClusterStatus.proto"; message GetSchemaAlterStatusRequest { Index: hbase-protocol/src/main/protobuf/MultiRowMutation.proto =================================================================== --- hbase-protocol/src/main/protobuf/MultiRowMutation.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/MultiRowMutation.proto (working copy) @@ -17,19 +17,19 @@ */ import "Client.proto"; option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "MultiRowMutation"; +option java_outer_classname = "MultiRowMutationProtos"; option java_generate_equals_and_hash = true; option java_generic_services = true; option optimize_for = SPEED; -message MultiMutateRequest { +message MutateRowsRequest { repeated MutationProto mutation_request = 1; } -message MultiMutateResponse { +message MutateRowsResponse { } service MultiRowMutationService { - rpc 
MutateRows(MultiMutateRequest) - returns(MultiMutateResponse); -} + rpc MutateRows(MutateRowsRequest) + returns(MutateRowsResponse); +} \ No newline at end of file Index: hbase-protocol/src/main/protobuf/ZooKeeper.proto =================================================================== --- hbase-protocol/src/main/protobuf/ZooKeeper.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/ZooKeeper.proto (working copy) @@ -25,7 +25,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; /** * Content of the meta-region-server znode. Index: hbase-protocol/src/main/protobuf/MasterAdmin.proto =================================================================== --- hbase-protocol/src/main/protobuf/MasterAdmin.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/MasterAdmin.proto (working copy) @@ -26,7 +26,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "Client.proto"; /* Column-level protobufs */ @@ -158,6 +158,9 @@ required NamespaceDescriptor namespaceDescriptor = 1; } +message ModifyNamespaceResponse { +} + message GetNamespaceDescriptorRequest { required string namespaceName = 1; } @@ -166,9 +169,6 @@ required NamespaceDescriptor namespaceDescriptor = 1; } -message ModifyNamespaceResponse { -} - message ListNamespaceDescriptorsRequest { } @@ -223,10 +223,10 @@ optional bool prev_balance_value = 1; } -message CatalogScanRequest { +message RunCatalogScanRequest { } -message CatalogScanResponse { +message RunCatalogScanResponse { optional int32 scan_result = 1; } @@ -245,26 +245,26 @@ required bool value = 1; } -message TakeSnapshotRequest{ +message SnapshotRequest { required SnapshotDescription snapshot = 1; } -message TakeSnapshotResponse{ +message SnapshotResponse { required int64 expected_timeout = 1; } -message ListSnapshotRequest{ +message GetCompletedSnapshotsRequest { } -message ListSnapshotResponse{ +message GetCompletedSnapshotsResponse { repeated SnapshotDescription snapshots = 1; } -message DeleteSnapshotRequest{ +message DeleteSnapshotRequest { required SnapshotDescription snapshot = 1; } -message DeleteSnapshotResponse{ +message DeleteSnapshotResponse { } message RestoreSnapshotRequest { @@ -277,11 +277,11 @@ /* if you don't send the snapshot, then you will get it back * in the response (if the snapshot is done) so you can check the snapshot */ -message IsSnapshotDoneRequest{ +message IsSnapshotDoneRequest { optional SnapshotDescription snapshot = 1; } -message IsSnapshotDoneResponse{ +message IsSnapshotDoneResponse { optional bool done = 1 [default = false]; optional SnapshotDescription snapshot = 2; } @@ -381,8 +381,8 @@ returns(SetBalancerRunningResponse); /** Get a run of the catalog janitor */ - rpc RunCatalogScan(CatalogScanRequest) - returns(CatalogScanResponse); + rpc RunCatalogScan(RunCatalogScanRequest) + returns(RunCatalogScanResponse); /** * Enable the catalog janitor on or off. @@ -405,13 +405,13 @@ /** * Create a snapshot for the given table. */ - rpc Snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse); + rpc Snapshot(SnapshotRequest) returns(SnapshotResponse); /** - * List completed snapshots. + * Get completed snapshots. 
* Returns a list of snapshot descriptors for completed snapshots */ - rpc GetCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse); + rpc GetCompletedSnapshots(GetCompletedSnapshotsRequest) returns(GetCompletedSnapshotsResponse); /** * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot. @@ -445,7 +445,7 @@ rpc CreateNamespace(CreateNamespaceRequest) returns(CreateNamespaceResponse); - /** Delete's namespace synchronously */ + /** Deletes namespace synchronously */ rpc DeleteNamespace(DeleteNamespaceRequest) returns(DeleteNamespaceResponse); Index: hbase-protocol/src/main/protobuf/SecureBulkLoad.proto =================================================================== --- hbase-protocol/src/main/protobuf/SecureBulkLoad.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/SecureBulkLoad.proto (working copy) @@ -22,13 +22,13 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import 'hbase.proto'; +import 'HBase.proto'; import 'Client.proto'; message SecureBulkLoadHFilesRequest { repeated BulkLoadHFileRequest.FamilyPath family_path = 1; optional bool assign_seq_num = 2; - required DelegationTokenProto fs_token = 3; + required DelegationToken fs_token = 3; required string bulk_token = 4; } @@ -36,7 +36,7 @@ required bool loaded = 1; } -message DelegationTokenProto { +message DelegationToken { optional bytes identifier = 1; optional bytes password = 2; optional string kind = 3; Index: hbase-protocol/src/main/protobuf/MapReduce.proto =================================================================== --- hbase-protocol/src/main/protobuf/MapReduce.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/MapReduce.proto (working copy) @@ -23,7 +23,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; - import "hbase.proto"; + import "HBase.proto"; message ScanMetrics { Index: hbase-protocol/src/main/protobuf/RowProcessor.proto =================================================================== --- hbase-protocol/src/main/protobuf/RowProcessor.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/RowProcessor.proto (working copy) @@ -26,16 +26,16 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -message RowProcessorRequest { +message ProcessRequest { required string row_processor_class_name = 1; optional string row_processor_initializer_message_name = 2; optional bytes row_processor_initializer_message = 3; } -message RowProcessorResult { +message ProcessResponse { required bytes row_processor_result = 1; } service RowProcessorService { - rpc Process (RowProcessorRequest) returns (RowProcessorResult); + rpc Process(ProcessRequest) returns (ProcessResponse); } Index: hbase-protocol/src/main/protobuf/RegionServerStatus.proto =================================================================== --- hbase-protocol/src/main/protobuf/RegionServerStatus.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/RegionServerStatus.proto (working copy) @@ -24,7 +24,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "ClusterStatus.proto"; message RegionServerStartupRequest { Index: hbase-protocol/src/main/protobuf/Client.proto =================================================================== --- hbase-protocol/src/main/protobuf/Client.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Client.proto (working copy) @@ -24,7 +24,7 @@ option 
java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; import "Filter.proto"; import "Cell.proto"; import "Comparator.proto"; Index: hbase-protocol/src/main/protobuf/Authentication.proto =================================================================== --- hbase-protocol/src/main/protobuf/Authentication.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/Authentication.proto (working copy) @@ -55,10 +55,10 @@ // RPC request & response messages -message TokenRequest { +message GetAuthenticationTokenRequest { } -message TokenResponse { +message GetAuthenticationTokenResponse { optional Token token = 1; } @@ -73,8 +73,8 @@ // RPC service service AuthenticationService { - rpc GetAuthenticationToken(TokenRequest) - returns (TokenResponse); + rpc GetAuthenticationToken(GetAuthenticationTokenRequest) + returns (GetAuthenticationTokenResponse); rpc WhoAmI(WhoAmIRequest) returns (WhoAmIResponse); Index: hbase-protocol/src/main/protobuf/hbase.proto =================================================================== --- hbase-protocol/src/main/protobuf/hbase.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/hbase.proto (working copy) @@ -1,187 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// This file contains protocol buffers that are shared throughout HBase - -option java_package = "org.apache.hadoop.hbase.protobuf.generated"; -option java_outer_classname = "HBaseProtos"; -option java_generate_equals_and_hash = true; -option optimize_for = SPEED; - -import "Cell.proto"; - -/** - * Table Name - */ -message TableName { - required bytes namespace = 1; - required bytes qualifier = 2; -} - -/** - * Table Schema - * Inspired by the rest TableSchema - */ -message TableSchema { - optional TableName table_name = 1; - repeated BytesBytesPair attributes = 2; - repeated ColumnFamilySchema column_families = 3; - repeated NameStringPair configuration = 4; -} - -/** - * Column Family Schema - * Inspired by the rest ColumSchemaMessage - */ -message ColumnFamilySchema { - required bytes name = 1; - repeated BytesBytesPair attributes = 2; - repeated NameStringPair configuration = 3; -} - -/** - * Protocol buffer version of HRegionInfo. - */ -message RegionInfo { - required uint64 region_id = 1; - required TableName table_name = 2; - optional bytes start_key = 3; - optional bytes end_key = 4; - optional bool offline = 5; - optional bool split = 6; -} - -/** - * Protocol buffer for favored nodes - */ -message FavoredNodes { - repeated ServerName favored_node = 1; -} - -/** - * Container protocol buffer to specify a region. - * You can specify region by region name, or the hash - * of the region name, which is known as encoded - * region name. 
- */ -message RegionSpecifier { - required RegionSpecifierType type = 1; - required bytes value = 2; - - enum RegionSpecifierType { - // ,,. - REGION_NAME = 1; - - // hash of ,, - ENCODED_REGION_NAME = 2; - } -} - -/** - * A range of time. Both from and to are Java time - * stamp in milliseconds. If you don't specify a time - * range, it means all time. By default, if not - * specified, from = 0, and to = Long.MAX_VALUE - */ -message TimeRange { - optional uint64 from = 1; - optional uint64 to = 2; -} - -/* Comparison operators */ -enum CompareType { - LESS = 0; - LESS_OR_EQUAL = 1; - EQUAL = 2; - NOT_EQUAL = 3; - GREATER_OR_EQUAL = 4; - GREATER = 5; - NO_OP = 6; -} - -/** - * Protocol buffer version of ServerName - */ -message ServerName { - required string host_name = 1; - optional uint32 port = 2; - optional uint64 start_code = 3; -} - -// Comment data structures - -message Coprocessor { - required string name = 1; -} - -message NameStringPair { - required string name = 1; - required string value = 2; -} - -message NameBytesPair { - required string name = 1; - optional bytes value = 2; -} - -message BytesBytesPair { - required bytes first = 1; - required bytes second = 2; -} - -message NameInt64Pair { - optional string name = 1; - optional int64 value = 2; -} - -/** - * Description of the snapshot to take - */ -message SnapshotDescription { - required string name = 1; - optional string table = 2; // not needed for delete, but checked for in taking snapshot - optional int64 creation_time = 3 [default = 0]; - enum Type { - DISABLED = 0; - FLUSH = 1; - } - optional Type type = 4 [default = FLUSH]; - optional int32 version = 5; -} - -message EmptyMsg { -} - -message LongMsg { - required int64 long_msg = 1; - -} - -message BigDecimalMsg { - required bytes bigdecimal_msg = 1; -} - -message UUID { - required uint64 least_sig_bits = 1; - required uint64 most_sig_bits = 2; -} - -message NamespaceDescriptor { - required bytes name = 1; - repeated NameStringPair configuration = 2; -} Index: hbase-protocol/src/main/protobuf/WAL.proto =================================================================== --- hbase-protocol/src/main/protobuf/WAL.proto (revision 1522009) +++ hbase-protocol/src/main/protobuf/WAL.proto (working copy) @@ -21,7 +21,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; -import "hbase.proto"; +import "HBase.proto"; message WALHeader { optional bool has_compression = 1; Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TracingProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TracingProtos.java (revision 0) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TracingProtos.java (revision 1522038) @@ -0,0 +1,591 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: Tracing.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class TracingProtos { + private TracingProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RPCTInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional int64 trace_id = 1; + /** + * optional int64 trace_id = 1; + */ + boolean hasTraceId(); + /** + * optional int64 trace_id = 1; + */ + long getTraceId(); + + // optional int64 parent_id = 2; + /** + * optional int64 parent_id = 2; + */ + boolean hasParentId(); + /** + * optional int64 parent_id = 2; + */ + long getParentId(); + } + /** + * Protobuf type {@code RPCTInfo} + * + *
+   *Used to pass through the information necessary to continue
+   *a trace after an RPC is made. All we need is the traceid
+   *(so we know the overarching trace this message is a part of), and
+   *the id of the current span when this message was sent, so we know
+   *what span caused the new span we will create when this message is received.
+   * </pre>
+ */ + public static final class RPCTInfo extends + com.google.protobuf.GeneratedMessage + implements RPCTInfoOrBuilder { + // Use RPCTInfo.newBuilder() to construct. + private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RPCTInfo defaultInstance; + public static RPCTInfo getDefaultInstance() { + return defaultInstance; + } + + public RPCTInfo getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RPCTInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + traceId_ = input.readInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + parentId_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RPCTInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RPCTInfo(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional int64 trace_id = 1; + public static final int TRACE_ID_FIELD_NUMBER = 1; + private long traceId_; + /** + * optional int64 trace_id = 1; + */ + public boolean hasTraceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int64 trace_id = 1; + */ + public long getTraceId() { + return traceId_; + } + + // optional int64 parent_id = 2; + public static final int PARENT_ID_FIELD_NUMBER = 2; + private long parentId_; + /** + * optional int64 
parent_id = 2; + */ + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 parent_id = 2; + */ + public long getParentId() { + return parentId_; + } + + private void initFields() { + traceId_ = 0L; + parentId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, traceId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt64(2, parentId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, traceId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, parentId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo) obj; + + boolean result = true; + result = result && (hasTraceId() == other.hasTraceId()); + if (hasTraceId()) { + result = result && (getTraceId() + == other.getTraceId()); + } + result = result && (hasParentId() == other.hasParentId()); + if (hasParentId()) { + result = result && (getParentId() + == other.getParentId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTraceId()) { + hash = (37 * hash) + TRACE_ID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTraceId()); + } + if (hasParentId()) { + hash = (37 * hash) + PARENT_ID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getParentId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code RPCTInfo} + * + *
+     *Used to pass through the information necessary to continue
+     *a trace after an RPC is made. All we need is the traceid
+     *(so we know the overarching trace this message is a part of), and
+     *the id of the current span when this message was sent, so we know
+     *what span caused the new span we will create when this message is received.
+     * </pre>
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + traceId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + parentId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo build() { + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.traceId_ = traceId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.parentId_ = parentId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance()) return this; + if (other.hasTraceId()) { + setTraceId(other.getTraceId()); + } + if (other.hasParentId()) { + setParentId(other.getParentId()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional int64 trace_id = 1; + private long traceId_ ; + /** + * optional int64 trace_id = 1; + */ + public boolean hasTraceId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int64 trace_id = 1; + */ + public long getTraceId() { + return traceId_; + } + /** + * optional int64 trace_id = 1; + */ + public Builder setTraceId(long value) { + bitField0_ |= 0x00000001; + traceId_ = value; + onChanged(); + return this; + } + /** + * optional int64 trace_id = 1; + */ + public Builder clearTraceId() { + bitField0_ = (bitField0_ & ~0x00000001); + traceId_ = 0L; + onChanged(); + return this; + } + + // optional int64 parent_id = 2; + private long parentId_ ; + /** + * optional int64 parent_id = 2; + */ + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional int64 parent_id = 2; + */ + public long getParentId() { + return parentId_; + } + /** + * optional int64 parent_id = 2; + */ + public Builder setParentId(long value) { + bitField0_ |= 0x00000002; + parentId_ = value; + onChanged(); + return this; + } + /** + * optional int64 parent_id = 2; + */ + public Builder clearParentId() { + bitField0_ = (bitField0_ & ~0x00000002); + parentId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RPCTInfo) + } + + static { + defaultInstance = new RPCTInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RPCTInfo) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RPCTInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RPCTInfo_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\rTracing.proto\"/\n\010RPCTInfo\022\020\n\010trace_id\030" + + "\001 \001(\003\022\021\n\tparent_id\030\002 \001(\003B@\n*org.apache.h" + + "adoop.hbase.protobuf.generatedB\rTracingP" + + "rotosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RPCTInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RPCTInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RPCTInfo_descriptor, + new java.lang.String[] { "TraceId", "ParentId", }); 
+ return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProtos.java (revision 0) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProtos.java (revision 1522038) @@ -0,0 +1,1358 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: MultiRowMutation.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class MultiRowMutationProtos { + private MultiRowMutationProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface MutateRowsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .MutationProto mutation_request = 1; + /** + * repeated .MutationProto mutation_request = 1; + */ + java.util.List + getMutationRequestList(); + /** + * repeated .MutationProto mutation_request = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index); + /** + * repeated .MutationProto mutation_request = 1; + */ + int getMutationRequestCount(); + /** + * repeated .MutationProto mutation_request = 1; + */ + java.util.List + getMutationRequestOrBuilderList(); + /** + * repeated .MutationProto mutation_request = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( + int index); + } + /** + * Protobuf type {@code MutateRowsRequest} + */ + public static final class MutateRowsRequest extends + com.google.protobuf.GeneratedMessage + implements MutateRowsRequestOrBuilder { + // Use MutateRowsRequest.newBuilder() to construct. 
+ private MutateRowsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MutateRowsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MutateRowsRequest defaultInstance; + public static MutateRowsRequest getDefaultInstance() { + return defaultInstance; + } + + public MutateRowsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MutateRowsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + mutationRequest_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MutateRowsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateRowsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .MutationProto mutation_request = 1; + public static final int MUTATION_REQUEST_FIELD_NUMBER = 1; + private java.util.List mutationRequest_; + /** + * repeated .MutationProto mutation_request = 1; + */ + public java.util.List 
getMutationRequestList() { + return mutationRequest_; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public java.util.List + getMutationRequestOrBuilderList() { + return mutationRequest_; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public int getMutationRequestCount() { + return mutationRequest_.size(); + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { + return mutationRequest_.get(index); + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( + int index) { + return mutationRequest_.get(index); + } + + private void initFields() { + mutationRequest_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getMutationRequestCount(); i++) { + if (!getMutationRequest(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < mutationRequest_.size(); i++) { + output.writeMessage(1, mutationRequest_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < mutationRequest_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mutationRequest_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) obj; + + boolean result = true; + result = result && getMutationRequestList() + .equals(other.getMutationRequestList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getMutationRequestCount() > 0) { + hash = (37 * hash) + MUTATION_REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getMutationRequestList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MutateRowsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMutationRequestFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mutationRequestBuilder_ == null) { + mutationRequest_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + mutationRequestBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest(this); + int from_bitField0_ = bitField0_; + if (mutationRequestBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.mutationRequest_ = mutationRequest_; + } else { + result.mutationRequest_ = mutationRequestBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.getDefaultInstance()) return this; + if (mutationRequestBuilder_ == null) { + if (!other.mutationRequest_.isEmpty()) { + if (mutationRequest_.isEmpty()) { + mutationRequest_ = other.mutationRequest_; + bitField0_ = 
(bitField0_ & ~0x00000001); + } else { + ensureMutationRequestIsMutable(); + mutationRequest_.addAll(other.mutationRequest_); + } + onChanged(); + } + } else { + if (!other.mutationRequest_.isEmpty()) { + if (mutationRequestBuilder_.isEmpty()) { + mutationRequestBuilder_.dispose(); + mutationRequestBuilder_ = null; + mutationRequest_ = other.mutationRequest_; + bitField0_ = (bitField0_ & ~0x00000001); + mutationRequestBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getMutationRequestFieldBuilder() : null; + } else { + mutationRequestBuilder_.addAllMessages(other.mutationRequest_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getMutationRequestCount(); i++) { + if (!getMutationRequest(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .MutationProto mutation_request = 1; + private java.util.List mutationRequest_ = + java.util.Collections.emptyList(); + private void ensureMutationRequestIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + mutationRequest_ = new java.util.ArrayList(mutationRequest_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_; + + /** + * repeated .MutationProto mutation_request = 1; + */ + public java.util.List getMutationRequestList() { + if (mutationRequestBuilder_ == null) { + return java.util.Collections.unmodifiableList(mutationRequest_); + } else { + return mutationRequestBuilder_.getMessageList(); + } + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public int getMutationRequestCount() { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.size(); + } else { + return mutationRequestBuilder_.getCount(); + } + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.get(index); + } else { + return mutationRequestBuilder_.getMessage(index); + } + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder setMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.set(index, value); + onChanged(); + } else { + mutationRequestBuilder_.setMessage(index, value); + } + return this; + } + 
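[Editor's note] The accessors above (add/set/clear/remove over mutation_request) are the whole public surface for composing a request. A minimal sketch of driving them, assuming MutationProto's row and mutate_type fields as declared in Client.proto (not shown in this patch); the row key and class name are illustrative only:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
    import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;

    public class BuildRequestSketch {
      static MutateRowsRequest buildSingleRowPut() {
        // MutationType.PUT is assumed from Client.proto's MutationProto enum.
        MutationProto put = MutationProto.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .setMutateType(MutationProto.MutationType.PUT)
            .build();
        // addMutationRequest appends to the repeated mutation_request field,
        // routed either into the builder's plain list or its RepeatedFieldBuilder.
        return MutateRowsRequest.newBuilder()
            .addMutationRequest(put)
            .build();
      }
    }
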
/** + * repeated .MutationProto mutation_request = 1; + */ + public Builder setMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.set(index, builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.add(value); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder addMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { + if (mutationRequestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMutationRequestIsMutable(); + mutationRequest_.add(index, value); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder addMutationRequest( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.add(builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder addMutationRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.add(index, builderForValue.build()); + onChanged(); + } else { + mutationRequestBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder addAllMutationRequest( + java.lang.Iterable values) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + super.addAll(values, mutationRequest_); + onChanged(); + } else { + mutationRequestBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder clearMutationRequest() { + if (mutationRequestBuilder_ == null) { + mutationRequest_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + mutationRequestBuilder_.clear(); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public Builder removeMutationRequest(int index) { + if (mutationRequestBuilder_ == null) { + ensureMutationRequestIsMutable(); + mutationRequest_.remove(index); + onChanged(); + } else { + mutationRequestBuilder_.remove(index); + } + return this; + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder( + int index) { + return getMutationRequestFieldBuilder().getBuilder(index); + } + /** + * repeated .MutationProto 
mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( + int index) { + if (mutationRequestBuilder_ == null) { + return mutationRequest_.get(index); } else { + return mutationRequestBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public java.util.List + getMutationRequestOrBuilderList() { + if (mutationRequestBuilder_ != null) { + return mutationRequestBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(mutationRequest_); + } + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() { + return getMutationRequestFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder( + int index) { + return getMutationRequestFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); + } + /** + * repeated .MutationProto mutation_request = 1; + */ + public java.util.List + getMutationRequestBuilderList() { + return getMutationRequestFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> + getMutationRequestFieldBuilder() { + if (mutationRequestBuilder_ == null) { + mutationRequestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( + mutationRequest_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + mutationRequest_ = null; + } + return mutationRequestBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MutateRowsRequest) + } + + static { + defaultInstance = new MutateRowsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateRowsRequest) + } + + public interface MutateRowsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code MutateRowsResponse} + */ + public static final class MutateRowsResponse extends + com.google.protobuf.GeneratedMessage + implements MutateRowsResponseOrBuilder { + // Use MutateRowsResponse.newBuilder() to construct. 
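[Editor's note] The MutateRowsResponse message beginning here declares no fields, so a successful call is signaled simply by the RPC completing. Each generated message also exposes the same parseFrom/parseDelimitedFrom family delegating to PARSER; a minimal round-trip sketch for the request type, relying only on methods generated in this file plus the standard toByteArray() from the protobuf runtime:

    import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;

    public class RoundTripSketch {
      public static void main(String[] args) throws Exception {
        MutateRowsRequest original = MutateRowsRequest.newBuilder().build();
        byte[] wire = original.toByteArray();                        // serialize
        MutateRowsRequest parsed = MutateRowsRequest.parseFrom(wire); // PARSER.parseFrom(data)
        // Structural equality via the generated equals().
        if (!parsed.equals(original)) throw new AssertionError("round trip changed message");
      }
    }
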
+ private MutateRowsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MutateRowsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MutateRowsResponse defaultInstance; + public static MutateRowsResponse getDefaultInstance() { + return defaultInstance; + } + + public MutateRowsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MutateRowsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MutateRowsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MutateRowsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final 
long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parseFrom( 
+ com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MutateRowsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other 
instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:MutateRowsResponse) + } + + static { + defaultInstance = new MutateRowsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateRowsResponse) + } + + /** + * Protobuf service {@code MultiRowMutationService} + */ + public static abstract class MultiRowMutationService + implements com.google.protobuf.Service { + protected MultiRowMutationService() {} + + public interface Interface { + /** + * rpc MutateRows(.MutateRowsRequest) returns (.MutateRowsResponse); + */ + public abstract void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new MultiRowMutationService() { + @java.lang.Override + public void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request, + com.google.protobuf.RpcCallback done) { + impl.mutateRows(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final 
com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc MutateRows(.MutateRowsRequest) returns (.MutateRowsResponse); + */ + public abstract void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance(); + default: + throw new 
java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse mutateRows( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:MultiRowMutationService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateRowsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateRowsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateRowsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateRowsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026MultiRowMutation.proto\032\014Client.proto\"=" + + "\n\021MutateRowsRequest\022(\n\020mutation_request\030" + + "\001 \003(\0132\016.MutationProto\"\024\n\022MutateRowsRespo" + + 
"nse2P\n\027MultiRowMutationService\0225\n\nMutate" + + "Rows\022\022.MutateRowsRequest\032\023.MutateRowsRes" + + "ponseBL\n*org.apache.hadoop.hbase.protobu" + + "f.generatedB\026MultiRowMutationProtosH\001\210\001\001" + + "\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_MutateRowsRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_MutateRowsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateRowsRequest_descriptor, + new java.lang.String[] { "MutationRequest", }); + internal_static_MutateRowsResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_MutateRowsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateRowsResponse_descriptor, + new java.lang.String[] { }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutation.java (working copy) @@ -1,1358 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: MultiRowMutation.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class MultiRowMutation { - private MultiRowMutation() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface MultiMutateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .MutationProto mutation_request = 1; - /** - * repeated .MutationProto mutation_request = 1; - */ - java.util.List - getMutationRequestList(); - /** - * repeated .MutationProto mutation_request = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index); - /** - * repeated .MutationProto mutation_request = 1; - */ - int getMutationRequestCount(); - /** - * repeated .MutationProto mutation_request = 1; - */ - java.util.List - getMutationRequestOrBuilderList(); - /** - * repeated .MutationProto mutation_request = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( - int index); - } - /** - * Protobuf type {@code MultiMutateRequest} - */ - public static final class MultiMutateRequest extends - com.google.protobuf.GeneratedMessage - implements MultiMutateRequestOrBuilder { - // Use MultiMutateRequest.newBuilder() to construct. 
- private MultiMutateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiMutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiMutateRequest defaultInstance; - public static MultiMutateRequest getDefaultInstance() { - return defaultInstance; - } - - public MultiMutateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private MultiMutateRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - mutationRequest_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry)); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiMutateRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiMutateRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .MutationProto mutation_request = 1; - public static final int MUTATION_REQUEST_FIELD_NUMBER = 1; - private java.util.List mutationRequest_; - /** - * repeated .MutationProto mutation_request = 1; - */ - public java.util.List 
getMutationRequestList() { - return mutationRequest_; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public java.util.List - getMutationRequestOrBuilderList() { - return mutationRequest_; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public int getMutationRequestCount() { - return mutationRequest_.size(); - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { - return mutationRequest_.get(index); - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder( - int index) { - return mutationRequest_.get(index); - } - - private void initFields() { - mutationRequest_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getMutationRequestCount(); i++) { - if (!getMutationRequest(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < mutationRequest_.size(); i++) { - output.writeMessage(1, mutationRequest_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < mutationRequest_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, mutationRequest_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) obj; - - boolean result = true; - result = result && getMutationRequestList() - .equals(other.getMutationRequestList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getMutationRequestCount() > 0) { - hash = (37 * hash) + MUTATION_REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getMutationRequestList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public 
static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code MultiMutateRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMutationRequestFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (mutationRequestBuilder_ == null) { - mutationRequest_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - mutationRequestBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this); - int from_bitField0_ = bitField0_; - if (mutationRequestBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.mutationRequest_ = mutationRequest_; - } else { - result.mutationRequest_ = mutationRequestBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance()) return this; - if (mutationRequestBuilder_ == null) { - if (!other.mutationRequest_.isEmpty()) { - if (mutationRequest_.isEmpty()) { - mutationRequest_ = other.mutationRequest_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureMutationRequestIsMutable(); - mutationRequest_.addAll(other.mutationRequest_); 
- } - onChanged(); - } - } else { - if (!other.mutationRequest_.isEmpty()) { - if (mutationRequestBuilder_.isEmpty()) { - mutationRequestBuilder_.dispose(); - mutationRequestBuilder_ = null; - mutationRequest_ = other.mutationRequest_; - bitField0_ = (bitField0_ & ~0x00000001); - mutationRequestBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getMutationRequestFieldBuilder() : null; - } else { - mutationRequestBuilder_.addAllMessages(other.mutationRequest_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getMutationRequestCount(); i++) { - if (!getMutationRequest(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // repeated .MutationProto mutation_request = 1; - private java.util.List mutationRequest_ = - java.util.Collections.emptyList(); - private void ensureMutationRequestIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - mutationRequest_ = new java.util.ArrayList(mutationRequest_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_; - - /** - * repeated .MutationProto mutation_request = 1; - */ - public java.util.List getMutationRequestList() { - if (mutationRequestBuilder_ == null) { - return java.util.Collections.unmodifiableList(mutationRequest_); - } else { - return mutationRequestBuilder_.getMessageList(); - } - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public int getMutationRequestCount() { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.size(); - } else { - return mutationRequestBuilder_.getCount(); - } - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.get(index); - } else { - return mutationRequestBuilder_.getMessage(index); - } - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder setMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { - if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.set(index, value); - onChanged(); - } else { - mutationRequestBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder setMutationRequest( - int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.set(index, builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { - if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.add(value); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder addMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { - if (mutationRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMutationRequestIsMutable(); - mutationRequest_.add(index, value); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder addMutationRequest( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.add(builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder addMutationRequest( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.add(index, builderForValue.build()); - onChanged(); - } else { - mutationRequestBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder addAllMutationRequest( - java.lang.Iterable values) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - super.addAll(values, mutationRequest_); - onChanged(); - } else { - mutationRequestBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder clearMutationRequest() { - if (mutationRequestBuilder_ == null) { - mutationRequest_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - mutationRequestBuilder_.clear(); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public Builder removeMutationRequest(int index) { - if (mutationRequestBuilder_ == null) { - ensureMutationRequestIsMutable(); - mutationRequest_.remove(index); - onChanged(); - } else { - mutationRequestBuilder_.remove(index); - } - return this; - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder( - int index) { - return getMutationRequestFieldBuilder().getBuilder(index); - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder 
getMutationRequestOrBuilder( - int index) { - if (mutationRequestBuilder_ == null) { - return mutationRequest_.get(index); } else { - return mutationRequestBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public java.util.List - getMutationRequestOrBuilderList() { - if (mutationRequestBuilder_ != null) { - return mutationRequestBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(mutationRequest_); - } - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() { - return getMutationRequestFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder( - int index) { - return getMutationRequestFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()); - } - /** - * repeated .MutationProto mutation_request = 1; - */ - public java.util.List - getMutationRequestBuilderList() { - return getMutationRequestFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> - getMutationRequestFieldBuilder() { - if (mutationRequestBuilder_ == null) { - mutationRequestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( - mutationRequest_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - mutationRequest_ = null; - } - return mutationRequestBuilder_; - } - - // @@protoc_insertion_point(builder_scope:MultiMutateRequest) - } - - static { - defaultInstance = new MultiMutateRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiMutateRequest) - } - - public interface MultiMutateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code MultiMutateResponse} - */ - public static final class MultiMutateResponse extends - com.google.protobuf.GeneratedMessage - implements MultiMutateResponseOrBuilder { - // Use MultiMutateResponse.newBuilder() to construct. 
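[Editor's note] As the removed MultiMutateRequest above shows, the old message carried the identical field (repeated .MutationProto mutation_request = 1, MUTATION_REQUEST_FIELD_NUMBER = 1), so the old and new request messages are wire-compatible and callers migrate by a one-for-one name substitution. A small illustration, with `put` standing in for any MutationProto built as in the earlier sketch:

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
    import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;

    public class RenameSketch {
      static MultiRowMutationProtos.MutateRowsRequest migrate(MutationProto put) {
        // Before this patch:
        //   MultiRowMutation.MultiMutateRequest.newBuilder().addMutationRequest(put).build();
        // After:
        return MultiRowMutationProtos.MutateRowsRequest.newBuilder()
            .addMutationRequest(put)
            .build();
      }
    }
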
- private MultiMutateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private MultiMutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final MultiMutateResponse defaultInstance; - public static MultiMutateResponse getDefaultInstance() { - return defaultInstance; - } - - public MultiMutateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private MultiMutateResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public MultiMutateResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new MultiMutateResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long 
serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code MultiMutateResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) { - return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:MultiMutateResponse) - } - - static { - defaultInstance = new MultiMutateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:MultiMutateResponse) - } - - /** - * Protobuf service {@code MultiRowMutationService} - */ - public static abstract class MultiRowMutationService - implements com.google.protobuf.Service { - protected MultiRowMutationService() {} - - public interface Interface { - /** - * rpc MutateRows(.MultiMutateRequest) returns (.MultiMutateResponse); - */ - public abstract void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new MultiRowMutationService() { - @java.lang.Override - public void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done) { - impl.mutateRows(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != 
getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc MutateRows(.MultiMutateRequest) returns (.MultiMutateResponse); - */ - public abstract void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.mutateRows(controller, (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub 
extends org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse mutateRows( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:MultiRowMutationService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiMutateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiMutateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_MultiMutateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_MultiMutateResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\026MultiRowMutation.proto\032\014Client.proto\">" + - "\n\022MultiMutateRequest\022(\n\020mutation_request" + - "\030\001 \003(\0132\016.MutationProto\"\025\n\023MultiMutateRes" + - "ponse2R\n\027MultiRowMutationService\0227\n\nMuta" + - "teRows\022\023.MultiMutateRequest\032\024.MultiMutat" + - "eResponseBF\n*org.apache.hadoop.hbase.pro" + - "tobuf.generatedB\020MultiRowMutationH\001\210\001\001\240\001" + - "\001" - }; - 
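// ---------------------------------------------------------------------
// Reviewer note (not part of the generated source): MultiRowMutationService
// is the coprocessor endpoint behind atomic mutations spanning several rows
// of a single region. A minimal client-side sketch, assuming an open HTable
// `table`, Puts `put1`/`put2` whose rows are colocated in one region, and
// that the endpoint is deployed on that region; only the generated names
// come from this file, everything else is illustrative:
//
//   MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
//   MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
//   MultiMutateRequest request = MultiMutateRequest.newBuilder()
//       .addMutationRequest(m1)
//       .addMutationRequest(m2)
//       .build();
//   CoprocessorRpcChannel channel = table.coprocessorService(put1.getRow());
//   MultiRowMutationService.BlockingInterface service =
//       MultiRowMutationService.newBlockingStub(channel);
//   service.mutateRows(null, request);  // applied atomically, or not at all
// ---------------------------------------------------------------------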
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_MultiMutateRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_MultiMutateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiMutateRequest_descriptor, - new java.lang.String[] { "MutationRequest", }); - internal_static_MultiMutateResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_MultiMutateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_MultiMutateResponse_descriptor, - new java.lang.String[] { }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java (working copy) @@ -7351,7 +7351,7 @@ // @@protoc_insertion_point(class_scope:RevokeResponse) } - public interface UserPermissionsRequestOrBuilder + public interface GetUserPermissionsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional .Permission.Type type = 1; @@ -7389,24 +7389,24 @@ com.google.protobuf.ByteString getNamespaceName(); } /** - * Protobuf type {@code UserPermissionsRequest} + * Protobuf type {@code GetUserPermissionsRequest} */ - public static final class UserPermissionsRequest extends + public static final class GetUserPermissionsRequest extends com.google.protobuf.GeneratedMessage - implements UserPermissionsRequestOrBuilder { - // Use UserPermissionsRequest.newBuilder() to construct. - private UserPermissionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements GetUserPermissionsRequestOrBuilder { + // Use GetUserPermissionsRequest.newBuilder() to construct. 
+ private GetUserPermissionsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private UserPermissionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private GetUserPermissionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final UserPermissionsRequest defaultInstance; - public static UserPermissionsRequest getDefaultInstance() { + private static final GetUserPermissionsRequest defaultInstance; + public static GetUserPermissionsRequest getDefaultInstance() { return defaultInstance; } - public UserPermissionsRequest getDefaultInstanceForType() { + public GetUserPermissionsRequest getDefaultInstanceForType() { return defaultInstance; } @@ -7416,7 +7416,7 @@ getUnknownFields() { return this.unknownFields; } - private UserPermissionsRequest( + private GetUserPermissionsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -7482,28 +7482,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UserPermissionsRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetUserPermissionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new UserPermissionsRequest(input, extensionRegistry); + return new GetUserPermissionsRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -7632,10 +7632,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest) obj; boolean result = true; result = result && (hasType() == other.hasType()); @@ -7683,53 +7683,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7738,7 +7738,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -7750,24 +7750,24 @@ return builder; } /** - * Protobuf type {@code UserPermissionsRequest} + * Protobuf type {@code GetUserPermissionsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -7807,23 +7807,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -7848,16 +7848,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance()) return this; if (other.hasType()) { setType(other.getType()); } @@ -7885,11 +7885,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest) e.getUnfinishedMessage(); + parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -8089,18 +8089,18 @@ return this; } - // @@protoc_insertion_point(builder_scope:UserPermissionsRequest) + // @@protoc_insertion_point(builder_scope:GetUserPermissionsRequest) } static { - defaultInstance = new UserPermissionsRequest(true); + defaultInstance = new GetUserPermissionsRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:UserPermissionsRequest) + // @@protoc_insertion_point(class_scope:GetUserPermissionsRequest) } - public interface UserPermissionsResponseOrBuilder + public interface GetUserPermissionsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .UserPermission user_permission = 1; @@ -8129,24 +8129,24 @@ int index); } /** - * Protobuf type {@code UserPermissionsResponse} + * Protobuf type {@code GetUserPermissionsResponse} */ - public static final class UserPermissionsResponse extends + public static final class GetUserPermissionsResponse extends com.google.protobuf.GeneratedMessage - implements UserPermissionsResponseOrBuilder { - // Use UserPermissionsResponse.newBuilder() to construct. - private UserPermissionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements GetUserPermissionsResponseOrBuilder { + // Use GetUserPermissionsResponse.newBuilder() to construct. + private GetUserPermissionsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private UserPermissionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private GetUserPermissionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final UserPermissionsResponse defaultInstance; - public static UserPermissionsResponse getDefaultInstance() { + private static final GetUserPermissionsResponse defaultInstance; + public static GetUserPermissionsResponse getDefaultInstance() { return defaultInstance; } - public UserPermissionsResponse getDefaultInstanceForType() { + public GetUserPermissionsResponse getDefaultInstanceForType() { return defaultInstance; } @@ -8156,7 +8156,7 @@ getUnknownFields() { return this.unknownFields; } - private UserPermissionsResponse( + private GetUserPermissionsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -8204,28 +8204,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public UserPermissionsResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetUserPermissionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new UserPermissionsResponse(input, extensionRegistry); + return new GetUserPermissionsResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -8319,10 +8319,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse) obj; boolean result = true; result = result && getUserPermissionList() @@ -8349,53 +8349,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse 
parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -8404,7 +8404,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -8416,24 +8416,24 @@ return builder; } /** - * Protobuf type {@code UserPermissionsResponse} + * Protobuf type {@code GetUserPermissionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -8469,23 +8469,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_UserPermissionsResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_GetUserPermissionsResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse(this); int from_bitField0_ = bitField0_; if (userPermissionBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -8501,16 +8501,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse)other); + if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance()) return this; if (userPermissionBuilder_ == null) { if (!other.userPermission_.isEmpty()) { if (userPermission_.isEmpty()) { @@ -8555,11 +8555,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -8810,15 +8810,15 @@ return userPermissionBuilder_; } - // @@protoc_insertion_point(builder_scope:UserPermissionsResponse) + // @@protoc_insertion_point(builder_scope:GetUserPermissionsResponse) } static { - defaultInstance = new UserPermissionsResponse(true); + defaultInstance = new GetUserPermissionsResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:UserPermissionsResponse) + // @@protoc_insertion_point(class_scope:GetUserPermissionsResponse) } public interface CheckPermissionsRequestOrBuilder @@ -9905,12 +9905,12 @@ com.google.protobuf.RpcCallback done); /** - * rpc GetUserPermissions(.UserPermissionsRequest) returns (.UserPermissionsResponse); + * rpc GetUserPermissions(.GetUserPermissionsRequest) returns (.GetUserPermissionsResponse); */ public abstract void getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request, + com.google.protobuf.RpcCallback done); /** * rpc CheckPermissions(.CheckPermissionsRequest) returns (.CheckPermissionsResponse); @@ -9944,8 +9944,8 @@ @java.lang.Override public void getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request, + com.google.protobuf.RpcCallback done) { impl.getUserPermissions(controller, request, done); } @@ -9984,7 +9984,7 @@ case 1: return impl.revoke(controller, 
(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest)request); case 2: - return impl.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request); + return impl.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest)request); case 3: return impl.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request); default: @@ -10006,7 +10006,7 @@ case 1: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); default: @@ -10028,7 +10028,7 @@ case 1: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); default: @@ -10056,12 +10056,12 @@ com.google.protobuf.RpcCallback done); /** - * rpc GetUserPermissions(.UserPermissionsRequest) returns (.UserPermissionsResponse); + * rpc GetUserPermissions(.GetUserPermissionsRequest) returns (.GetUserPermissionsResponse); */ public abstract void getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request, + com.google.protobuf.RpcCallback done); /** * rpc CheckPermissions(.CheckPermissionsRequest) returns (.CheckPermissionsResponse); @@ -10104,8 +10104,8 @@ done)); return; case 2: - this.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 3: @@ -10132,7 +10132,7 @@ case 1: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); default: @@ -10154,7 +10154,7 @@ case 1: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeResponse.getDefaultInstance(); case 2: - return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(); + return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); default: @@ -10210,17 +10210,17 @@ public void getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance())); } public void checkPermissions( @@ -10255,9 +10255,9 @@ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RevokeRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse checkPermissions( @@ -10297,15 +10297,15 @@ } - public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse getUserPermissions( + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse getUserPermissions( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsRequest request) + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(2), controller, request, - org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance()); } @@ -10381,15 +10381,15 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RevokeResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_UserPermissionsRequest_descriptor; + internal_static_GetUserPermissionsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserPermissionsRequest_fieldAccessorTable; + internal_static_GetUserPermissionsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_UserPermissionsResponse_descriptor; + internal_static_GetUserPermissionsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UserPermissionsResponse_fieldAccessorTable; + internal_static_GetUserPermissionsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_CheckPermissionsRequest_descriptor; private static @@ -10409,7 +10409,7 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\023AccessControl.proto\032\013hbase.proto\"\250\002\n\nP" + + "\n\023AccessControl.proto\032\013HBase.proto\"\250\002\n\nP" + "ermission\022\036\n\004type\030\001 \002(\0162\020.Permission.Typ" + "e\022,\n\021global_permission\030\002 \001(\0132\021.GlobalPer" + "mission\0222\n\024namespace_permission\030\003 \001(\0132\024." + @@ -10433,22 +10433,22 @@ "ermission\030\001 \002(\0132\017.UserPermission\"\017\n\rGran" + "tResponse\"9\n\rRevokeRequest\022(\n\017user_permi" + "ssion\030\001 \002(\0132\017.UserPermission\"\020\n\016RevokeRe" + - "sponse\"p\n\026UserPermissionsRequest\022\036\n\004type" + - "\030\001 \001(\0162\020.Permission.Type\022\036\n\ntable_name\030\002" + - " \001(\0132\n.TableName\022\026\n\016namespace_name\030\003 \001(\014" + - "\"C\n\027UserPermissionsResponse\022(\n\017user_perm" + - "ission\030\001 \003(\0132\017.UserPermission\":\n\027CheckPe" + - "rmissionsRequest\022\037\n\npermission\030\001 \003(\0132\013.P", - "ermission\"\032\n\030CheckPermissionsResponse2\373\001" + - "\n\024AccessControlService\022&\n\005Grant\022\r.GrantR" + - "equest\032\016.GrantResponse\022)\n\006Revoke\022\016.Revok" + - "eRequest\032\017.RevokeResponse\022G\n\022GetUserPerm" + - "issions\022\027.UserPermissionsRequest\032\030.UserP" + - "ermissionsResponse\022G\n\020CheckPermissions\022\030" + - ".CheckPermissionsRequest\032\031.CheckPermissi" + - "onsResponseBI\n*org.apache.hadoop.hbase.p" + - "rotobuf.generatedB\023AccessControlProtosH\001" + - "\210\001\001\240\001\001" + "sponse\"s\n\031GetUserPermissionsRequest\022\036\n\004t" + + "ype\030\001 \001(\0162\020.Permission.Type\022\036\n\ntable_nam" + + "e\030\002 \001(\0132\n.TableName\022\026\n\016namespace_name\030\003 " + + "\001(\014\"F\n\032GetUserPermissionsResponse\022(\n\017use" + + "r_permission\030\001 \003(\0132\017.UserPermission\":\n\027C" + + "heckPermissionsRequest\022\037\n\npermission\030\001 \003", + "(\0132\013.Permission\"\032\n\030CheckPermissionsRespo" + + "nse2\201\002\n\024AccessControlService\022&\n\005Grant\022\r." 
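Review note on the descriptorData hunk being rebuilt here: the escaped blob is the serialized FileDescriptorProto for AccessControl.proto, and message names are part of that encoding, so a rename forces protoc to regenerate the whole string; it cannot be patched token-by-token. A standalone sketch (assumed usage, not in the patch) that checks the renamed types from the runtime descriptor:

import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;

public class DescriptorCheck {
  public static void main(String[] args) {
    // The generated outer class rebuilds its FileDescriptor from the
    // descriptorData array above; the renamed names must show up here.
    Descriptors.ServiceDescriptor service = AccessControlProtos.getDescriptor()
        .findServiceByName("AccessControlService");
    Descriptors.MethodDescriptor method = service.findMethodByName("GetUserPermissions");
    System.out.println(method.getInputType().getName());   // GetUserPermissionsRequest
    System.out.println(method.getOutputType().getName());  // GetUserPermissionsResponse
  }
}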
+ + "GrantRequest\032\016.GrantResponse\022)\n\006Revoke\022\016" + + ".RevokeRequest\032\017.RevokeResponse\022M\n\022GetUs" + + "erPermissions\022\032.GetUserPermissionsReques" + + "t\032\033.GetUserPermissionsResponse\022G\n\020CheckP" + + "ermissions\022\030.CheckPermissionsRequest\032\031.C" + + "heckPermissionsResponseBI\n*org.apache.ha" + + "doop.hbase.protobuf.generatedB\023AccessCon" + + "trolProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -10521,17 +10521,17 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RevokeResponse_descriptor, new java.lang.String[] { }); - internal_static_UserPermissionsRequest_descriptor = + internal_static_GetUserPermissionsRequest_descriptor = getDescriptor().getMessageTypes().get(10); - internal_static_UserPermissionsRequest_fieldAccessorTable = new + internal_static_GetUserPermissionsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserPermissionsRequest_descriptor, + internal_static_GetUserPermissionsRequest_descriptor, new java.lang.String[] { "Type", "TableName", "NamespaceName", }); - internal_static_UserPermissionsResponse_descriptor = + internal_static_GetUserPermissionsResponse_descriptor = getDescriptor().getMessageTypes().get(11); - internal_static_UserPermissionsResponse_fieldAccessorTable = new + internal_static_GetUserPermissionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UserPermissionsResponse_descriptor, + internal_static_GetUserPermissionsResponse_descriptor, new java.lang.String[] { "UserPermission", }); internal_static_CheckPermissionsRequest_descriptor = getDescriptor().getMessageTypes().get(12); Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java (working copy) @@ -9391,7 +9391,7 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\017ZooKeeper.proto\032\013hbase.proto\"D\n\020MetaRe" + + "\n\017ZooKeeper.proto\032\013HBase.proto\"D\n\020MetaRe" + "gionServer\022\033\n\006server\030\001 \002(\0132\013.ServerName\022" + "\023\n\013rpc_version\030\002 \001(\r\":\n\006Master\022\033\n\006master" + "\030\001 \002(\0132\013.ServerName\022\023\n\013rpc_version\030\002 \001(\r" + Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterMonitorProtos.java (working copy) @@ -5098,7 +5098,7 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\023MasterMonitor.proto\032\014Master.proto\032\013hba" + + "\n\023MasterMonitor.proto\032\014Master.proto\032\013HBa" + "se.proto\032\023ClusterStatus.proto\"=\n\033GetSche" + "maAlterStatusRequest\022\036\n\ntable_name\030\001 \002(\013" + "2\n.TableName\"T\n\034GetSchemaAlterStatusResp" + Index: 
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java (working copy) @@ -46,19 +46,19 @@ */ boolean getAssignSeqNum(); - // required .DelegationTokenProto fs_token = 3; + // required .DelegationToken fs_token = 3; /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ boolean hasFsToken(); /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken(); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken(); /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); // required string bulk_token = 4; /** @@ -140,11 +140,11 @@ break; } case 26: { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder subBuilder = null; + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = fsToken_.toBuilder(); } - fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.PARSER, extensionRegistry); + fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(fsToken_); fsToken_ = subBuilder.buildPartial(); @@ -252,25 +252,25 @@ return assignSeqNum_; } - // required .DelegationTokenProto fs_token = 3; + // required .DelegationToken fs_token = 3; public static final int FS_TOKEN_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_; + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken fsToken_; /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ public boolean hasFsToken() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken() { return fsToken_; } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { return fsToken_; } @@ -320,7 +320,7 @@ private void initFields() { familyPath_ = java.util.Collections.emptyList(); assignSeqNum_ = false; - fsToken_ = 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); bulkToken_ = ""; } private byte memoizedIsInitialized = -1; @@ -575,7 +575,7 @@ assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000002); if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); } else { fsTokenBuilder_.clear(); } @@ -1002,20 +1002,20 @@ return this; } - // required .DelegationTokenProto fs_token = 3; - private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + // required .DelegationToken fs_token = 3; + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder> fsTokenBuilder_; + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder> fsTokenBuilder_; /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ public boolean hasFsToken() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getFsToken() { if (fsTokenBuilder_ == null) { return fsToken_; } else { @@ -1023,9 +1023,9 @@ } } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) { + public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -1039,10 +1039,10 @@ return this; } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ public Builder setFsToken( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder builderForValue) { if (fsTokenBuilder_ == null) { fsToken_ = builderForValue.build(); onChanged(); @@ -1053,14 +1053,14 @@ return this; } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public Builder 
mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) { + public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken value) { if (fsTokenBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && - fsToken_ != org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance()) { + fsToken_ != org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance()) { fsToken_ = - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder(fsToken_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); } else { fsToken_ = value; } @@ -1072,11 +1072,11 @@ return this; } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ public Builder clearFsToken() { if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); onChanged(); } else { fsTokenBuilder_.clear(); @@ -1085,17 +1085,17 @@ return this; } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder getFsTokenBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder getFsTokenBuilder() { bitField0_ |= 0x00000004; onChanged(); return getFsTokenFieldBuilder().getBuilder(); } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { if (fsTokenBuilder_ != null) { return fsTokenBuilder_.getMessageOrBuilder(); } else { @@ -1103,14 +1103,14 @@ } } /** - * required .DelegationTokenProto fs_token = 3; + * required .DelegationToken fs_token = 3; */ private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder> getFsTokenFieldBuilder() { if (fsTokenBuilder_ == null) { fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder, 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder>( fsToken_, getParentForChildren(), isClean()); @@ -1646,7 +1646,7 @@ // @@protoc_insertion_point(class_scope:SecureBulkLoadHFilesResponse) } - public interface DelegationTokenProtoOrBuilder + public interface DelegationTokenOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bytes identifier = 1; @@ -1700,24 +1700,24 @@ getServiceBytes(); } /** - * Protobuf type {@code DelegationTokenProto} + * Protobuf type {@code DelegationToken} */ - public static final class DelegationTokenProto extends + public static final class DelegationToken extends com.google.protobuf.GeneratedMessage - implements DelegationTokenProtoOrBuilder { - // Use DelegationTokenProto.newBuilder() to construct. - private DelegationTokenProto(com.google.protobuf.GeneratedMessage.Builder builder) { + implements DelegationTokenOrBuilder { + // Use DelegationToken.newBuilder() to construct. + private DelegationToken(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private DelegationTokenProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final DelegationTokenProto defaultInstance; - public static DelegationTokenProto getDefaultInstance() { + private static final DelegationToken defaultInstance; + public static DelegationToken getDefaultInstance() { return defaultInstance; } - public DelegationTokenProto getDefaultInstanceForType() { + public DelegationToken getDefaultInstanceForType() { return defaultInstance; } @@ -1727,7 +1727,7 @@ getUnknownFields() { return this.unknownFields; } - private DelegationTokenProto( + private DelegationToken( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -1784,28 +1784,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationToken_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DelegationTokenProto parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DelegationToken parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new DelegationTokenProto(input, extensionRegistry); + return new DelegationToken(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -2000,10 +2000,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) obj; + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) obj; boolean result = true; result = result && (hasIdentifier() == other.hasIdentifier()); @@ -2060,53 +2060,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto 
parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2115,7 +2115,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2127,24 +2127,24 @@ return builder; } /** - * Protobuf type {@code DelegationTokenProto} + * Protobuf type {@code DelegationToken} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationToken_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder() + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2181,23 +2181,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationToken_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto(this); + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -2222,16 +2222,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken.getDefaultInstance()) return this; if (other.hasIdentifier()) { setIdentifier(other.getIdentifier()); } @@ -2260,11 +2260,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - 
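Review note: only the message name changes here (DelegationTokenProto -> DelegationToken); field numbers and types are untouched, so the binary wire format is unchanged and only compiled callers need the rename. A hedged sketch of building the renamed token and attaching it to a bulk-load request (the token bytes, kind, service, and bulk token string are placeholders for illustration):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;

public class BulkLoadTokenExample {
  static SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildRequest() {
    // Same fields as before the rename: identifier/password are bytes,
    // kind/service are strings (see the regenerated descriptor below).
    SecureBulkLoadProtos.DelegationToken fsToken =
        SecureBulkLoadProtos.DelegationToken.newBuilder()
            .setIdentifier(ByteString.copyFromUtf8("id"))    // placeholder
            .setPassword(ByteString.copyFromUtf8("secret"))  // placeholder
            .setKind("HDFS_DELEGATION_TOKEN")                // assumption
            .setService("example-service")                   // placeholder
            .build();
    return SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder()
        .setFsToken(fsToken)               // required field 3
        .setBulkToken("/tmp/staging-dir")  // placeholder; required field 4
        .setAssignSeqNum(false)
        .build();
  }
}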
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationToken) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -2495,15 +2495,15 @@ return this; } - // @@protoc_insertion_point(builder_scope:DelegationTokenProto) + // @@protoc_insertion_point(builder_scope:DelegationToken) } static { - defaultInstance = new DelegationTokenProto(true); + defaultInstance = new DelegationToken(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:DelegationTokenProto) + // @@protoc_insertion_point(class_scope:DelegationToken) } public interface PrepareBulkLoadRequestOrBuilder @@ -4824,10 +4824,10 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_DelegationTokenProto_descriptor; + internal_static_DelegationToken_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DelegationTokenProto_fieldAccessorTable; + internal_static_DelegationToken_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_PrepareBulkLoadRequest_descriptor; private static @@ -4857,28 +4857,28 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\024SecureBulkLoad.proto\032\013hbase.proto\032\014Cli" + - "ent.proto\"\251\001\n\033SecureBulkLoadHFilesReques" + + "\n\024SecureBulkLoad.proto\032\013HBase.proto\032\014Cli" + + "ent.proto\"\244\001\n\033SecureBulkLoadHFilesReques" + "t\0225\n\013family_path\030\001 \003(\0132 .BulkLoadHFileRe" + "quest.FamilyPath\022\026\n\016assign_seq_num\030\002 \001(\010" + - "\022\'\n\010fs_token\030\003 \002(\0132\025.DelegationTokenProt" + - "o\022\022\n\nbulk_token\030\004 \002(\t\".\n\034SecureBulkLoadH" + - "FilesResponse\022\016\n\006loaded\030\001 \002(\010\"[\n\024Delegat" + - "ionTokenProto\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010pas" + - "sword\030\002 \001(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001" + - "(\t\"8\n\026PrepareBulkLoadRequest\022\036\n\ntable_na", - "me\030\001 \002(\0132\n.TableName\"-\n\027PrepareBulkLoadR" + - "esponse\022\022\n\nbulk_token\030\001 \002(\t\",\n\026CleanupBu" + - "lkLoadRequest\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027Cle" + - "anupBulkLoadResponse2\370\001\n\025SecureBulkLoadS" + - "ervice\022D\n\017PrepareBulkLoad\022\027.PrepareBulkL" + - "oadRequest\032\030.PrepareBulkLoadResponse\022S\n\024" + - "SecureBulkLoadHFiles\022\034.SecureBulkLoadHFi" + - "lesRequest\032\035.SecureBulkLoadHFilesRespons" + - "e\022D\n\017CleanupBulkLoad\022\027.CleanupBulkLoadRe" + - "quest\032\030.CleanupBulkLoadResponseBJ\n*org.a", - "pache.hadoop.hbase.protobuf.generatedB\024S" + - "ecureBulkLoadProtosH\001\210\001\001\240\001\001" + "\022\"\n\010fs_token\030\003 \002(\0132\020.DelegationToken\022\022\n\n" + + "bulk_token\030\004 \002(\t\".\n\034SecureBulkLoadHFiles" + + "Response\022\016\n\006loaded\030\001 
\002(\010\"V\n\017DelegationTo" + + "ken\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password\030\002 \001(" + + "\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"8\n\026Prep" + + "areBulkLoadRequest\022\036\n\ntable_name\030\001 \002(\0132\n", + ".TableName\"-\n\027PrepareBulkLoadResponse\022\022\n" + + "\nbulk_token\030\001 \002(\t\",\n\026CleanupBulkLoadRequ" + + "est\022\022\n\nbulk_token\030\001 \002(\t\"\031\n\027CleanupBulkLo" + + "adResponse2\370\001\n\025SecureBulkLoadService\022D\n\017" + + "PrepareBulkLoad\022\027.PrepareBulkLoadRequest" + + "\032\030.PrepareBulkLoadResponse\022S\n\024SecureBulk" + + "LoadHFiles\022\034.SecureBulkLoadHFilesRequest" + + "\032\035.SecureBulkLoadHFilesResponse\022D\n\017Clean" + + "upBulkLoad\022\027.CleanupBulkLoadRequest\032\030.Cl" + + "eanupBulkLoadResponseBJ\n*org.apache.hado", + "op.hbase.protobuf.generatedB\024SecureBulkL" + + "oadProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -4897,11 +4897,11 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SecureBulkLoadHFilesResponse_descriptor, new java.lang.String[] { "Loaded", }); - internal_static_DelegationTokenProto_descriptor = + internal_static_DelegationToken_descriptor = getDescriptor().getMessageTypes().get(2); - internal_static_DelegationTokenProto_fieldAccessorTable = new + internal_static_DelegationToken_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DelegationTokenProto_descriptor, + internal_static_DelegationToken_descriptor, new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }); internal_static_PrepareBulkLoadRequest_descriptor = getDescriptor().getMessageTypes().get(3); Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterAdminProtos.java (working copy) @@ -15538,6 +15538,344 @@ // @@protoc_insertion_point(class_scope:ModifyNamespaceRequest) } + public interface ModifyNamespaceResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code ModifyNamespaceResponse} + */ + public static final class ModifyNamespaceResponse extends + com.google.protobuf.GeneratedMessage + implements ModifyNamespaceResponseOrBuilder { + // Use ModifyNamespaceResponse.newBuilder() to construct. 
+ private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ModifyNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ModifyNamespaceResponse defaultInstance; + public static ModifyNamespaceResponse getDefaultInstance() { + return defaultInstance; + } + + public ModifyNamespaceResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ModifyNamespaceResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ModifyNamespaceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ModifyNamespaceResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + 
return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code ModifyNamespaceResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse(this); + onBuilt(); + 
return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:ModifyNamespaceResponse) + } + + static { + defaultInstance = new ModifyNamespaceResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ModifyNamespaceResponse) + } + public interface GetNamespaceDescriptorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -16616,344 +16954,6 @@ // @@protoc_insertion_point(class_scope:GetNamespaceDescriptorResponse) } - public interface ModifyNamespaceResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code ModifyNamespaceResponse} - */ - public static final class ModifyNamespaceResponse extends - com.google.protobuf.GeneratedMessage - implements ModifyNamespaceResponseOrBuilder { - // Use ModifyNamespaceResponse.newBuilder() to construct. 
- private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ModifyNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ModifyNamespaceResponse defaultInstance; - public static ModifyNamespaceResponse getDefaultInstance() { - return defaultInstance; - } - - public ModifyNamespaceResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ModifyNamespaceResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ModifyNamespaceResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ModifyNamespaceResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - 
return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ModifyNamespaceResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ModifyNamespaceResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse(this); - onBuilt(); - 
return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyNamespaceResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:ModifyNamespaceResponse) - } - - static { - defaultInstance = new ModifyNamespaceResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ModifyNamespaceResponse) - } - public interface ListNamespaceDescriptorsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } @@ -23587,28 +23587,28 @@ // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse) } - public interface CatalogScanRequestOrBuilder + public interface RunCatalogScanRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** - * Protobuf type {@code CatalogScanRequest} + * Protobuf type {@code RunCatalogScanRequest} */ - public static final class CatalogScanRequest extends + public static final class RunCatalogScanRequest extends com.google.protobuf.GeneratedMessage - implements CatalogScanRequestOrBuilder { - // Use CatalogScanRequest.newBuilder() to construct. - private CatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements RunCatalogScanRequestOrBuilder { + // Use RunCatalogScanRequest.newBuilder() to construct. 
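These hunks mechanically rename CatalogScanRequest/CatalogScanResponse to RunCatalogScanRequest/RunCatalogScanResponse throughout the generated code: class names, descriptors, field-accessor tables, PARSER instances, and the protoc insertion points. For a caller only the type names change; a minimal sketch, assuming the usual generated blocking-stub pattern (the masterAdminStub and controller names are placeholders, not part of this patch):

    // Build the renamed request and read the response's optional
    // scan_result field (optional int32 scan_result = 1, see the
    // RunCatalogScanResponse hunks further down).
    RunCatalogScanRequest req = RunCatalogScanRequest.newBuilder().build();
    RunCatalogScanResponse resp = masterAdminStub.runCatalogScan(controller, req);
    int cleaned = resp.hasScanResult() ? resp.getScanResult() : 0;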
+ private RunCatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private RunCatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final CatalogScanRequest defaultInstance; - public static CatalogScanRequest getDefaultInstance() { + private static final RunCatalogScanRequest defaultInstance; + public static RunCatalogScanRequest getDefaultInstance() { return defaultInstance; } - public CatalogScanRequest getDefaultInstanceForType() { + public RunCatalogScanRequest getDefaultInstanceForType() { return defaultInstance; } @@ -23618,7 +23618,7 @@ getUnknownFields() { return this.unknownFields; } - private CatalogScanRequest( + private RunCatalogScanRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -23654,28 +23654,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CatalogScanRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RunCatalogScanRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CatalogScanRequest(input, extensionRegistry); + return new RunCatalogScanRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -23719,10 +23719,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest) obj; boolean result = true; result = result && @@ -23743,53 +23743,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -23798,7 +23798,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -23810,24 +23810,24 @@ return builder; } /** - * Protobuf type {@code CatalogScanRequest} + * Protobuf type {@code RunCatalogScanRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -23856,38 +23856,38 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest 
build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -23900,11 +23900,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -23914,18 +23914,18 @@ return this; } - // @@protoc_insertion_point(builder_scope:CatalogScanRequest) + // @@protoc_insertion_point(builder_scope:RunCatalogScanRequest) } static { - defaultInstance = new CatalogScanRequest(true); + defaultInstance = new RunCatalogScanRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:CatalogScanRequest) + // @@protoc_insertion_point(class_scope:RunCatalogScanRequest) } - public interface CatalogScanResponseOrBuilder + public interface RunCatalogScanResponseOrBuilder extends com.google.protobuf.MessageOrBuilder 
{ // optional int32 scan_result = 1; @@ -23939,24 +23939,24 @@ int getScanResult(); } /** - * Protobuf type {@code CatalogScanResponse} + * Protobuf type {@code RunCatalogScanResponse} */ - public static final class CatalogScanResponse extends + public static final class RunCatalogScanResponse extends com.google.protobuf.GeneratedMessage - implements CatalogScanResponseOrBuilder { - // Use CatalogScanResponse.newBuilder() to construct. - private CatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements RunCatalogScanResponseOrBuilder { + // Use RunCatalogScanResponse.newBuilder() to construct. + private RunCatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private RunCatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final CatalogScanResponse defaultInstance; - public static CatalogScanResponse getDefaultInstance() { + private static final RunCatalogScanResponse defaultInstance; + public static RunCatalogScanResponse getDefaultInstance() { return defaultInstance; } - public CatalogScanResponse getDefaultInstanceForType() { + public RunCatalogScanResponse getDefaultInstanceForType() { return defaultInstance; } @@ -23966,7 +23966,7 @@ getUnknownFields() { return this.unknownFields; } - private CatalogScanResponse( + private RunCatalogScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -24008,28 +24008,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CatalogScanResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RunCatalogScanResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CatalogScanResponse(input, extensionRegistry); + return new RunCatalogScanResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + 
public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -24098,10 +24098,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse) obj; boolean result = true; result = result && (hasScanResult() == other.hasScanResult()); @@ -24131,53 +24131,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -24186,7 +24186,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -24198,24 +24198,24 @@ return builder; } /** - * Protobuf type {@code CatalogScanResponse} + * Protobuf type {@code RunCatalogScanResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -24246,23 +24246,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_CatalogScanResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_RunCatalogScanResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -24275,16 +24275,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance()) return this; if (other.hasScanResult()) { setScanResult(other.getScanResult()); } @@ -24300,11 +24300,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -24348,15 +24348,15 @@ return this; } - // @@protoc_insertion_point(builder_scope:CatalogScanResponse) + // @@protoc_insertion_point(builder_scope:RunCatalogScanResponse) } static { - defaultInstance = new CatalogScanResponse(true); + defaultInstance = new RunCatalogScanResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:CatalogScanResponse) + // @@protoc_insertion_point(class_scope:RunCatalogScanResponse) } public interface EnableCatalogJanitorRequestOrBuilder @@ -26015,7 +26015,7 @@ // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse) } - public interface TakeSnapshotRequestOrBuilder + public interface SnapshotRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .SnapshotDescription snapshot = 1; @@ -26033,24 +26033,24 @@ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder(); } /** - * Protobuf type {@code TakeSnapshotRequest} + * Protobuf type {@code SnapshotRequest} */ - public static final class TakeSnapshotRequest extends + public static final class SnapshotRequest extends com.google.protobuf.GeneratedMessage - implements TakeSnapshotRequestOrBuilder { - // Use TakeSnapshotRequest.newBuilder() to construct. - private TakeSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SnapshotRequestOrBuilder { + // Use SnapshotRequest.newBuilder() to construct. 
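The surrounding hunks apply the same mechanical rename to the snapshot RPC messages: TakeSnapshotRequest becomes SnapshotRequest and TakeSnapshotResponse becomes SnapshotResponse. The request still wraps its required SnapshotDescription and the response still carries the required expected_timeout; a minimal caller sketch (the snapshot name and the setName call are illustrative assumptions, not taken from this patch):

    // SnapshotRequest: required .SnapshotDescription snapshot = 1
    SnapshotRequest req = SnapshotRequest.newBuilder()
        .setSnapshot(HBaseProtos.SnapshotDescription.newBuilder()
            .setName("snap-1") // assumed field setter, for illustration
            .build())
        .build();
    // SnapshotResponse: required int64 expected_timeout = 1
    // long timeoutMs = resp.getExpectedTimeout();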
+ private SnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private TakeSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final TakeSnapshotRequest defaultInstance; - public static TakeSnapshotRequest getDefaultInstance() { + private static final SnapshotRequest defaultInstance; + public static SnapshotRequest getDefaultInstance() { return defaultInstance; } - public TakeSnapshotRequest getDefaultInstanceForType() { + public SnapshotRequest getDefaultInstanceForType() { return defaultInstance; } @@ -26060,7 +26060,7 @@ getUnknownFields() { return this.unknownFields; } - private TakeSnapshotRequest( + private SnapshotRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -26110,28 +26110,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TakeSnapshotRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SnapshotRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new TakeSnapshotRequest(input, extensionRegistry); + return new SnapshotRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -26214,10 +26214,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest) 
obj; boolean result = true; result = result && (hasSnapshot() == other.hasSnapshot()); @@ -26247,53 +26247,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -26302,7 +26302,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -26314,24 +26314,24 @@ return builder; } /** - * Protobuf type {@code TakeSnapshotRequest} + * Protobuf type {@code SnapshotRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -26367,23 +26367,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest result = buildPartial(); + public 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -26400,16 +26400,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.getDefaultInstance()) return this; if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } @@ -26433,11 +26433,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -26565,18 +26565,18 @@ return snapshotBuilder_; } - // @@protoc_insertion_point(builder_scope:TakeSnapshotRequest) + // @@protoc_insertion_point(builder_scope:SnapshotRequest) } static { - defaultInstance = new TakeSnapshotRequest(true); + defaultInstance = new SnapshotRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:TakeSnapshotRequest) + // @@protoc_insertion_point(class_scope:SnapshotRequest) } - public interface TakeSnapshotResponseOrBuilder + public interface SnapshotResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required int64 expected_timeout = 1; @@ -26590,24 +26590,24 @@ long 
getExpectedTimeout(); } /** - * Protobuf type {@code TakeSnapshotResponse} + * Protobuf type {@code SnapshotResponse} */ - public static final class TakeSnapshotResponse extends + public static final class SnapshotResponse extends com.google.protobuf.GeneratedMessage - implements TakeSnapshotResponseOrBuilder { - // Use TakeSnapshotResponse.newBuilder() to construct. - private TakeSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements SnapshotResponseOrBuilder { + // Use SnapshotResponse.newBuilder() to construct. + private SnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private TakeSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private SnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final TakeSnapshotResponse defaultInstance; - public static TakeSnapshotResponse getDefaultInstance() { + private static final SnapshotResponse defaultInstance; + public static SnapshotResponse getDefaultInstance() { return defaultInstance; } - public TakeSnapshotResponse getDefaultInstanceForType() { + public SnapshotResponse getDefaultInstanceForType() { return defaultInstance; } @@ -26617,7 +26617,7 @@ getUnknownFields() { return this.unknownFields; } - private TakeSnapshotResponse( + private SnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -26659,28 +26659,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TakeSnapshotResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SnapshotResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new TakeSnapshotResponse(input, extensionRegistry); + return new SnapshotResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -26753,10 +26753,10 @@ if (obj == this) { return true; } - if (!(obj 
instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse) obj; boolean result = true; result = result && (hasExpectedTimeout() == other.hasExpectedTimeout()); @@ -26786,53 +26786,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -26841,7 +26841,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -26853,24 +26853,24 @@ return builder; } /** - * Protobuf type {@code TakeSnapshotResponse} + * Protobuf type {@code SnapshotResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -26901,23 +26901,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_TakeSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_SnapshotResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -26930,16 +26930,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance()) return this; if (other.hasExpectedTimeout()) { setExpectedTimeout(other.getExpectedTimeout()); } @@ -26959,11 +26959,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -27007,39 +27007,39 @@ return 
this; } - // @@protoc_insertion_point(builder_scope:TakeSnapshotResponse) + // @@protoc_insertion_point(builder_scope:SnapshotResponse) } static { - defaultInstance = new TakeSnapshotResponse(true); + defaultInstance = new SnapshotResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:TakeSnapshotResponse) + // @@protoc_insertion_point(class_scope:SnapshotResponse) } - public interface ListSnapshotRequestOrBuilder + public interface GetCompletedSnapshotsRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { } /** - * Protobuf type {@code ListSnapshotRequest} + * Protobuf type {@code GetCompletedSnapshotsRequest} */ - public static final class ListSnapshotRequest extends + public static final class GetCompletedSnapshotsRequest extends com.google.protobuf.GeneratedMessage - implements ListSnapshotRequestOrBuilder { - // Use ListSnapshotRequest.newBuilder() to construct. - private ListSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements GetCompletedSnapshotsRequestOrBuilder { + // Use GetCompletedSnapshotsRequest.newBuilder() to construct. + private GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private ListSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private GetCompletedSnapshotsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final ListSnapshotRequest defaultInstance; - public static ListSnapshotRequest getDefaultInstance() { + private static final GetCompletedSnapshotsRequest defaultInstance; + public static GetCompletedSnapshotsRequest getDefaultInstance() { return defaultInstance; } - public ListSnapshotRequest getDefaultInstanceForType() { + public GetCompletedSnapshotsRequest getDefaultInstanceForType() { return defaultInstance; } @@ -27049,7 +27049,7 @@ getUnknownFields() { return this.unknownFields; } - private ListSnapshotRequest( + private GetCompletedSnapshotsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -27085,28 +27085,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public 
ListSnapshotRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetCompletedSnapshotsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new ListSnapshotRequest(input, extensionRegistry); + return new GetCompletedSnapshotsRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -27150,10 +27150,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest) obj; boolean result = true; result = result && @@ -27174,53 +27174,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -27229,7 +27229,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -27241,24 +27241,24 @@ return builder; } /** - * Protobuf type {@code ListSnapshotRequest} + * Protobuf type {@code GetCompletedSnapshotsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.Builder.class); + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -27287,38 +27287,38 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -27331,11 +27331,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -27345,18 +27345,18 @@ return this; } - // @@protoc_insertion_point(builder_scope:ListSnapshotRequest) + // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsRequest) } static { - defaultInstance = new ListSnapshotRequest(true); + defaultInstance = new GetCompletedSnapshotsRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:ListSnapshotRequest) + // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsRequest) } - public interface ListSnapshotResponseOrBuilder + public interface GetCompletedSnapshotsResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .SnapshotDescription snapshots = 1; @@ -27385,24 +27385,24 @@ int index); } /** - * Protobuf type {@code ListSnapshotResponse} + * Protobuf type {@code GetCompletedSnapshotsResponse} */ - public static final class ListSnapshotResponse extends + public static final class GetCompletedSnapshotsResponse extends com.google.protobuf.GeneratedMessage - implements ListSnapshotResponseOrBuilder { - // Use ListSnapshotResponse.newBuilder() to construct. - private ListSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements GetCompletedSnapshotsResponseOrBuilder { + // Use GetCompletedSnapshotsResponse.newBuilder() to construct. 
+ private GetCompletedSnapshotsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private ListSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private GetCompletedSnapshotsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final ListSnapshotResponse defaultInstance; - public static ListSnapshotResponse getDefaultInstance() { + private static final GetCompletedSnapshotsResponse defaultInstance; + public static GetCompletedSnapshotsResponse getDefaultInstance() { return defaultInstance; } - public ListSnapshotResponse getDefaultInstanceForType() { + public GetCompletedSnapshotsResponse getDefaultInstanceForType() { return defaultInstance; } @@ -27412,7 +27412,7 @@ getUnknownFields() { return this.unknownFields; } - private ListSnapshotResponse( + private GetCompletedSnapshotsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -27460,28 +27460,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ListSnapshotResponse parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetCompletedSnapshotsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new ListSnapshotResponse(input, extensionRegistry); + return new GetCompletedSnapshotsResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -27575,10 +27575,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse) obj; boolean result = true; result = result && getSnapshotsList() @@ -27605,53 +27605,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -27660,7 +27660,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -27672,24 +27672,24 @@ return builder; } /** - * Protobuf type {@code ListSnapshotResponse} + * Protobuf type {@code GetCompletedSnapshotsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -27725,23 +27725,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_ListSnapshotResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.internal_static_GetCompletedSnapshotsResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getDefaultInstanceForType() { - 
return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse build() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse(this); int from_bitField0_ = bitField0_; if (snapshotsBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -27757,16 +27757,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance()) return this; if (snapshotsBuilder_ == null) { if (!other.snapshots_.isEmpty()) { if (snapshots_.isEmpty()) { @@ -27811,11 +27811,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) e.getUnfinishedMessage(); + parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -28066,15 +28066,15 @@ return snapshotsBuilder_; } - // @@protoc_insertion_point(builder_scope:ListSnapshotResponse) + // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsResponse) } static { - defaultInstance = new ListSnapshotResponse(true); + defaultInstance = new GetCompletedSnapshotsResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:ListSnapshotResponse) + // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsResponse) } public interface DeleteSnapshotRequestOrBuilder @@ -32306,7 +32306,7 @@ com.google.protobuf.RpcCallback done); /** - * rpc RunCatalogScan(.CatalogScanRequest) returns (.CatalogScanResponse); + * rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse); * *
        ** Get a run of the catalog janitor 
@@ -32314,8 +32314,8 @@
        */
       public abstract void runCatalogScan(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done);
+          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse> done);
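
Only the request and callback message types change in this hunk; the calling convention stays the same. A minimal caller-side sketch of the renamed async signature, illustrative rather than part of the patch: it assumes the enclosing generated service is MasterAdminProtos.MasterAdminService (the service name is not visible in this excerpt) and that a connected stub and RpcController already exist.

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

class CatalogScanSketch {
  // Fires a catalog-janitor run; the callback is now parameterized on
  // RunCatalogScanResponse where it was CatalogScanResponse before this patch.
  static void requestCatalogScan(
      MasterAdminProtos.MasterAdminService.Interface service,  // assumed service name
      RpcController controller) {
    service.runCatalogScan(
        controller,
        MasterAdminProtos.RunCatalogScanRequest.newBuilder().build(),
        new RpcCallback<MasterAdminProtos.RunCatalogScanResponse>() {
          @Override
          public void run(MasterAdminProtos.RunCatalogScanResponse response) {
            // Invoked once the master has completed the janitor run.
          }
        });
  }
}
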
 
       /**
        * rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);
@@ -32357,7 +32357,7 @@
           com.google.protobuf.RpcCallback done);
 
       /**
-       * rpc Snapshot(.TakeSnapshotRequest) returns (.TakeSnapshotResponse);
+       * rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);
        *
        * 
        ** 
@@ -32366,22 +32366,22 @@
        */
       public abstract void snapshot(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse> done);
+          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse> done);
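
The response half of this rename, SnapshotResponse with its expectedTimeout accessors, appears earlier in this file. Below is a blocking-stub sketch of taking a snapshot under the new names; the SnapshotDescription wrapper field on SnapshotRequest (called "snapshot" here) is an assumption, since the request message body is not visible in this hunk.

import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

class SnapshotSketch {
  // Asks the master for a snapshot and returns its expected completion bound,
  // falling back to 0 when the response leaves expected_timeout unset.
  static long takeSnapshot(
      MasterAdminProtos.MasterAdminService.BlockingInterface master,  // assumed service name
      String snapshotName, String tableName) throws ServiceException {
    HBaseProtos.SnapshotDescription description =
        HBaseProtos.SnapshotDescription.newBuilder()
            .setName(snapshotName)
            .setTable(tableName)
            .build();
    MasterAdminProtos.SnapshotRequest request =
        MasterAdminProtos.SnapshotRequest.newBuilder()
            .setSnapshot(description)  // field name assumed, not shown in this hunk
            .build();
    MasterAdminProtos.SnapshotResponse response = master.snapshot(null, request);
    return response.hasExpectedTimeout() ? response.getExpectedTimeout() : 0L;
  }
}
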
 
       /**
-       * rpc GetCompletedSnapshots(.ListSnapshotRequest) returns (.ListSnapshotResponse);
+       * rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);
        *
        * 
        **
-       * List completed snapshots.
+       * Get completed snapshots.
        * Returns a list of snapshot descriptors for completed snapshots
        * 
        */
       public abstract void getCompletedSnapshots(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse> done);
+          org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse> done);
 
       /**
        * rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);
@@ -32475,7 +32475,7 @@
        * rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);
        *
        * 
-       ** Delete's namespace synchronously 
+       ** Deletes namespace synchronously 
        * 
*/ public abstract void deleteNamespace( @@ -32675,8 +32675,8 @@ @java.lang.Override public void runCatalogScan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request, + com.google.protobuf.RpcCallback done) { impl.runCatalogScan(controller, request, done); } @@ -32707,16 +32707,16 @@ @java.lang.Override public void snapshot( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request, + com.google.protobuf.RpcCallback done) { impl.snapshot(controller, request, done); } @java.lang.Override public void getCompletedSnapshots( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request, + com.google.protobuf.RpcCallback done) { impl.getCompletedSnapshots(controller, request, done); } @@ -32873,7 +32873,7 @@ case 16: return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest)request); case 17: - return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request); + return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest)request); case 18: return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest)request); case 19: @@ -32881,9 +32881,9 @@ case 20: return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 21: - return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest)request); + return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest)request); case 22: - return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest)request); + return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest)request); case 23: return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest)request); case 24: @@ -32957,7 +32957,7 @@ case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 19: @@ -32965,9 +32965,9 @@ case 20: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 21: - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.getDefaultInstance(); case 22: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.getDefaultInstance(); case 24: @@ -33041,7 +33041,7 @@ case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 19: @@ -33049,9 +33049,9 @@ case 20: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 21: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance(); case 22: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance(); case 24: @@ -33302,7 +33302,7 @@ com.google.protobuf.RpcCallback done); /** - * rpc RunCatalogScan(.CatalogScanRequest) returns (.CatalogScanResponse); + * rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse); * *
      ** Get a run of the catalog janitor 
@@ -33310,8 +33310,8 @@
      */
     public abstract void runCatalogScan(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse> done);
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse> done);
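
The same renamed pair seen through the BlockingInterface that appears further down in this file; a one-call sketch, again assuming a MasterAdminService stub obtained elsewhere.

import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

class BlockingCatalogScanSketch {
  // Synchronous counterpart of the async call sketched above: blocks until the
  // janitor run finishes and hands back the renamed RunCatalogScanResponse.
  static MasterAdminProtos.RunCatalogScanResponse runJanitor(
      MasterAdminProtos.MasterAdminService.BlockingInterface master)  // assumed service name
      throws ServiceException {
    return master.runCatalogScan(
        null, MasterAdminProtos.RunCatalogScanRequest.newBuilder().build());
  }
}
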
 
     /**
      * rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);
@@ -33353,7 +33353,7 @@
         com.google.protobuf.RpcCallback done);
 
     /**
-     * rpc Snapshot(.TakeSnapshotRequest) returns (.TakeSnapshotResponse);
+     * rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);
      *
      * 
      ** 
@@ -33362,22 +33362,22 @@
      */
     public abstract void snapshot(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request,
-        com.google.protobuf.RpcCallback done);
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request,
+        com.google.protobuf.RpcCallback done);
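
For the GetCompletedSnapshots RPC declared just below, the response exposes the repeated SnapshotDescription field whose getSnapshotsList() accessor shows up earlier in this diff. A blocking sketch that prints completed snapshot names, illustrative only and resting on the same assumed MasterAdminService stub.

import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

class ListSnapshotsSketch {
  // Fetches descriptors of all completed snapshots and prints each name.
  static void printCompletedSnapshots(
      MasterAdminProtos.MasterAdminService.BlockingInterface master)
      throws ServiceException {
    MasterAdminProtos.GetCompletedSnapshotsResponse response =
        master.getCompletedSnapshots(
            null, MasterAdminProtos.GetCompletedSnapshotsRequest.newBuilder().build());
    for (HBaseProtos.SnapshotDescription description : response.getSnapshotsList()) {
      System.out.println(description.getName());
    }
  }
}
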
 
     /**
-     * rpc GetCompletedSnapshots(.ListSnapshotRequest) returns (.ListSnapshotResponse);
+     * rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);
      *
      * 
      **
-     * List completed snapshots.
+     * Get completed snapshots.
      * Returns a list of snapshot descriptors for completed snapshots
      * 
      */
     public abstract void getCompletedSnapshots(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse> done);
+        org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse> done);
 
     /**
      * rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);
@@ -33471,7 +33471,7 @@
      * rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);
      *
      * 
-     ** Delete's namespace synchronously 
+     ** Deletes namespace synchronously 
      * 
*/ public abstract void deleteNamespace( @@ -33635,8 +33635,8 @@ done)); return; case 17: - this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 18: @@ -33655,13 +33655,13 @@ done)); return; case 21: - this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 22: - this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 23: @@ -33773,7 +33773,7 @@ case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest.getDefaultInstance(); case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorRequest.getDefaultInstance(); case 19: @@ -33781,9 +33781,9 @@ case 20: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 21: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest.getDefaultInstance(); case 22: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest.getDefaultInstance(); case 24: @@ -33857,7 +33857,7 @@ case 16: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningResponse.getDefaultInstance(); case 17: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance(); case 18: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse.getDefaultInstance(); case 19: @@ -33865,9 +33865,9 @@ case 20: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 21: - return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance(); case 22: - return 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance(); + return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance(); case 23: return org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse.getDefaultInstance(); case 24: @@ -34170,17 +34170,17 @@ public void runCatalogScan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(17), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance())); } public void enableCatalogJanitor( @@ -34230,32 +34230,32 @@ public void snapshot( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(21), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance())); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance())); } public void getCompletedSnapshots( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request, + com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(22), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance(), + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance(), com.google.protobuf.RpcUtil.generalizeCallback( done, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.class, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance())); + 
org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance())); } public void deleteSnapshot( @@ -34530,9 +34530,9 @@ org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse enableCatalogJanitor( @@ -34550,14 +34550,14 @@ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse snapshot( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse snapshot( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request) throws com.google.protobuf.ServiceException; - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getCompletedSnapshots( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request) throws com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotResponse deleteSnapshot( @@ -34832,15 +34832,15 @@ } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse runCatalogScan( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse runCatalogScan( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(17), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse.getDefaultInstance()); } @@ -34880,27 +34880,27 @@ } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse snapshot( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse snapshot( 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(21), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse.getDefaultInstance()); } - public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse getCompletedSnapshots( + public org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse getCompletedSnapshots( com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest request) + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest request) throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse) channel.callBlockingMethod( + return (org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(22), controller, request, - org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse.getDefaultInstance()); + org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse.getDefaultInstance()); } @@ -35208,6 +35208,11 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ModifyNamespaceRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_ModifyNamespaceResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ModifyNamespaceResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_GetNamespaceDescriptorRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -35218,11 +35223,6 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_ModifyNamespaceResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ModifyNamespaceResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor internal_static_ListNamespaceDescriptorsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -35293,15 +35293,15 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_SetBalancerRunningResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_CatalogScanRequest_descriptor; + internal_static_RunCatalogScanRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CatalogScanRequest_fieldAccessorTable; + internal_static_RunCatalogScanRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor 
- internal_static_CatalogScanResponse_descriptor; + internal_static_RunCatalogScanResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CatalogScanResponse_fieldAccessorTable; + internal_static_RunCatalogScanResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_EnableCatalogJanitorRequest_descriptor; private static @@ -35323,25 +35323,25 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_TakeSnapshotRequest_descriptor; + internal_static_SnapshotRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TakeSnapshotRequest_fieldAccessorTable; + internal_static_SnapshotRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_TakeSnapshotResponse_descriptor; + internal_static_SnapshotResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TakeSnapshotResponse_fieldAccessorTable; + internal_static_SnapshotResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_ListSnapshotRequest_descriptor; + internal_static_GetCompletedSnapshotsRequest_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ListSnapshotRequest_fieldAccessorTable; + internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_ListSnapshotResponse_descriptor; + internal_static_GetCompletedSnapshotsResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ListSnapshotResponse_fieldAccessorTable; + internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_DeleteSnapshotRequest_descriptor; private static @@ -35391,7 +35391,7 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\021MasterAdmin.proto\032\014Master.proto\032\013hbase" + + "\n\021MasterAdmin.proto\032\014Master.proto\032\013HBase" + ".proto\032\014Client.proto\"`\n\020AddColumnRequest" + "\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022,\n\017colu" + "mn_families\030\002 \002(\0132\023.ColumnFamilySchema\"\023" + @@ -35432,11 +35432,11 @@ "espaceRequest\022\025\n\rnamespaceName\030\001 \002(\t\"\031\n\027" + "DeleteNamespaceResponse\"K\n\026ModifyNamespa", "ceRequest\0221\n\023namespaceDescriptor\030\001 \002(\0132\024" + - ".NamespaceDescriptor\"6\n\035GetNamespaceDesc" + - "riptorRequest\022\025\n\rnamespaceName\030\001 \002(\t\"S\n\036" + - "GetNamespaceDescriptorResponse\0221\n\023namesp" + - "aceDescriptor\030\001 \002(\0132\024.NamespaceDescripto" + - "r\"\031\n\027ModifyNamespaceResponse\"!\n\037ListName" + + ".NamespaceDescriptor\"\031\n\027ModifyNamespaceR" + + "esponse\"6\n\035GetNamespaceDescriptorRequest" + + "\022\025\n\rnamespaceName\030\001 \002(\t\"S\n\036GetNamespaceD" + + "escriptorResponse\0221\n\023namespaceDescriptor" + + "\030\001 \002(\0132\024.NamespaceDescriptor\"!\n\037ListName" + "spaceDescriptorsRequest\"U\n ListNamespace" + "DescriptorsResponse\0221\n\023namespaceDescript" + "or\030\001 \003(\0132\024.NamespaceDescriptor\"?\n&ListTa" + @@ -35453,93 +35453,94 @@ 
"\010\"<\n\031SetBalancerRunningRequest\022\n\n\002on\030\001 \002", "(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n\032SetBalancerRu" + "nningResponse\022\032\n\022prev_balance_value\030\001 \001(" + - "\010\"\024\n\022CatalogScanRequest\"*\n\023CatalogScanRe" + - "sponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033EnableCat" + - "alogJanitorRequest\022\016\n\006enable\030\001 \002(\010\"2\n\034En" + - "ableCatalogJanitorResponse\022\022\n\nprev_value" + - "\030\001 \001(\010\" \n\036IsCatalogJanitorEnabledRequest" + - "\"0\n\037IsCatalogJanitorEnabledResponse\022\r\n\005v" + - "alue\030\001 \002(\010\"=\n\023TakeSnapshotRequest\022&\n\010sna" + - "pshot\030\001 \002(\0132\024.SnapshotDescription\"0\n\024Tak", - "eSnapshotResponse\022\030\n\020expected_timeout\030\001 " + - "\002(\003\"\025\n\023ListSnapshotRequest\"?\n\024ListSnapsh" + - "otResponse\022\'\n\tsnapshots\030\001 \003(\0132\024.Snapshot" + - "Description\"?\n\025DeleteSnapshotRequest\022&\n\010" + - "snapshot\030\001 \002(\0132\024.SnapshotDescription\"\030\n\026" + - "DeleteSnapshotResponse\"@\n\026RestoreSnapsho" + - "tRequest\022&\n\010snapshot\030\001 \002(\0132\024.SnapshotDes" + - "cription\"\031\n\027RestoreSnapshotResponse\"?\n\025I" + - "sSnapshotDoneRequest\022&\n\010snapshot\030\001 \001(\0132\024" + - ".SnapshotDescription\"U\n\026IsSnapshotDoneRe", - "sponse\022\023\n\004done\030\001 \001(\010:\005false\022&\n\010snapshot\030" + - "\002 \001(\0132\024.SnapshotDescription\"F\n\034IsRestore" + - "SnapshotDoneRequest\022&\n\010snapshot\030\001 \001(\0132\024." + - "SnapshotDescription\"3\n\035IsRestoreSnapshot" + - "DoneResponse\022\022\n\004done\030\001 \001(\010:\004true2\255\023\n\022Mas" + - "terAdminService\0222\n\tAddColumn\022\021.AddColumn" + - "Request\032\022.AddColumnResponse\022;\n\014DeleteCol" + - "umn\022\024.DeleteColumnRequest\032\025.DeleteColumn" + - "Response\022;\n\014ModifyColumn\022\024.ModifyColumnR" + - "equest\032\025.ModifyColumnResponse\0225\n\nMoveReg", - "ion\022\022.MoveRegionRequest\032\023.MoveRegionResp" + - "onse\022Y\n\026DispatchMergingRegions\022\036.Dispatc" + - "hMergingRegionsRequest\032\037.DispatchMerging" + - "RegionsResponse\022;\n\014AssignRegion\022\024.Assign" + - "RegionRequest\032\025.AssignRegionResponse\022A\n\016" + - "UnassignRegion\022\026.UnassignRegionRequest\032\027" + - ".UnassignRegionResponse\022>\n\rOfflineRegion" + - "\022\025.OfflineRegionRequest\032\026.OfflineRegionR" + - "esponse\0228\n\013DeleteTable\022\023.DeleteTableRequ" + - "est\032\024.DeleteTableResponse\0228\n\013EnableTable", - "\022\023.EnableTableRequest\032\024.EnableTableRespo" + - "nse\022;\n\014DisableTable\022\024.DisableTableReques" + - "t\032\025.DisableTableResponse\0228\n\013ModifyTable\022" + - "\023.ModifyTableRequest\032\024.ModifyTableRespon" + - "se\0228\n\013CreateTable\022\023.CreateTableRequest\032\024" + - ".CreateTableResponse\022/\n\010Shutdown\022\020.Shutd" + - "ownRequest\032\021.ShutdownResponse\0225\n\nStopMas" + - "ter\022\022.StopMasterRequest\032\023.StopMasterResp" + - "onse\022,\n\007Balance\022\017.BalanceRequest\032\020.Balan" + - "ceResponse\022M\n\022SetBalancerRunning\022\032.SetBa", - "lancerRunningRequest\032\033.SetBalancerRunnin" + - "gResponse\022;\n\016RunCatalogScan\022\023.CatalogSca" + - "nRequest\032\024.CatalogScanResponse\022S\n\024Enable" + - "CatalogJanitor\022\034.EnableCatalogJanitorReq" + - "uest\032\035.EnableCatalogJanitorResponse\022\\\n\027I" + - "sCatalogJanitorEnabled\022\037.IsCatalogJanito" + - "rEnabledRequest\032 
.IsCatalogJanitorEnable" + - "dResponse\022L\n\021ExecMasterService\022\032.Coproce" + - "ssorServiceRequest\032\033.CoprocessorServiceR" + - "esponse\0227\n\010Snapshot\022\024.TakeSnapshotReques", - "t\032\025.TakeSnapshotResponse\022D\n\025GetCompleted" + - "Snapshots\022\024.ListSnapshotRequest\032\025.ListSn" + - "apshotResponse\022A\n\016DeleteSnapshot\022\026.Delet" + - "eSnapshotRequest\032\027.DeleteSnapshotRespons" + - "e\022A\n\016IsSnapshotDone\022\026.IsSnapshotDoneRequ" + - "est\032\027.IsSnapshotDoneResponse\022D\n\017RestoreS" + - "napshot\022\027.RestoreSnapshotRequest\032\030.Resto" + - "reSnapshotResponse\022V\n\025IsRestoreSnapshotD" + - "one\022\035.IsRestoreSnapshotDoneRequest\032\036.IsR" + - "estoreSnapshotDoneResponse\022D\n\017IsMasterRu", - "nning\022\027.IsMasterRunningRequest\032\030.IsMaste" + - "rRunningResponse\022D\n\017ModifyNamespace\022\027.Mo" + - "difyNamespaceRequest\032\030.ModifyNamespaceRe" + - "sponse\022D\n\017CreateNamespace\022\027.CreateNamesp" + - "aceRequest\032\030.CreateNamespaceResponse\022D\n\017" + - "DeleteNamespace\022\027.DeleteNamespaceRequest" + - "\032\030.DeleteNamespaceResponse\022Y\n\026GetNamespa" + - "ceDescriptor\022\036.GetNamespaceDescriptorReq" + - "uest\032\037.GetNamespaceDescriptorResponse\022_\n" + - "\030ListNamespaceDescriptors\022 .ListNamespac", - "eDescriptorsRequest\032!.ListNamespaceDescr" + - "iptorsResponse\022t\n\037ListTableDescriptorsBy" + - "Namespace\022\'.ListTableDescriptorsByNamesp" + - "aceRequest\032(.ListTableDescriptorsByNames" + - "paceResponse\022b\n\031ListTableNamesByNamespac" + - "e\022!.ListTableNamesByNamespaceRequest\032\".L" + - "istTableNamesByNamespaceResponseBG\n*org." + - "apache.hadoop.hbase.protobuf.generatedB\021" + - "MasterAdminProtosH\001\210\001\001\240\001\001" + "\010\"\027\n\025RunCatalogScanRequest\"-\n\026RunCatalog" + + "ScanResponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033Ena" + + "bleCatalogJanitorRequest\022\016\n\006enable\030\001 \002(\010" + + "\"2\n\034EnableCatalogJanitorResponse\022\022\n\nprev" + + "_value\030\001 \001(\010\" \n\036IsCatalogJanitorEnabledR" + + "equest\"0\n\037IsCatalogJanitorEnabledRespons" + + "e\022\r\n\005value\030\001 \002(\010\"9\n\017SnapshotRequest\022&\n\010s" + + "napshot\030\001 \002(\0132\024.SnapshotDescription\",\n\020S", + "napshotResponse\022\030\n\020expected_timeout\030\001 \002(" + + "\003\"\036\n\034GetCompletedSnapshotsRequest\"H\n\035Get" + + "CompletedSnapshotsResponse\022\'\n\tsnapshots\030" + + "\001 \003(\0132\024.SnapshotDescription\"?\n\025DeleteSna" + + "pshotRequest\022&\n\010snapshot\030\001 \002(\0132\024.Snapsho" + + "tDescription\"\030\n\026DeleteSnapshotResponse\"@" + + "\n\026RestoreSnapshotRequest\022&\n\010snapshot\030\001 \002" + + "(\0132\024.SnapshotDescription\"\031\n\027RestoreSnaps" + + "hotResponse\"?\n\025IsSnapshotDoneRequest\022&\n\010" + + "snapshot\030\001 \001(\0132\024.SnapshotDescription\"U\n\026", + "IsSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fa" + + "lse\022&\n\010snapshot\030\002 \001(\0132\024.SnapshotDescript" + + "ion\"F\n\034IsRestoreSnapshotDoneRequest\022&\n\010s" + + "napshot\030\001 \001(\0132\024.SnapshotDescription\"3\n\035I" + + "sRestoreSnapshotDoneResponse\022\022\n\004done\030\001 \001" + + "(\010:\004true2\275\023\n\022MasterAdminService\0222\n\tAddCo" + + "lumn\022\021.AddColumnRequest\032\022.AddColumnRespo" + + "nse\022;\n\014DeleteColumn\022\024.DeleteColumnReques" + + "t\032\025.DeleteColumnResponse\022;\n\014ModifyColumn" + + 
"\022\024.ModifyColumnRequest\032\025.ModifyColumnRes", + "ponse\0225\n\nMoveRegion\022\022.MoveRegionRequest\032" + + "\023.MoveRegionResponse\022Y\n\026DispatchMergingR" + + "egions\022\036.DispatchMergingRegionsRequest\032\037" + + ".DispatchMergingRegionsResponse\022;\n\014Assig" + + "nRegion\022\024.AssignRegionRequest\032\025.AssignRe" + + "gionResponse\022A\n\016UnassignRegion\022\026.Unassig" + + "nRegionRequest\032\027.UnassignRegionResponse\022" + + ">\n\rOfflineRegion\022\025.OfflineRegionRequest\032" + + "\026.OfflineRegionResponse\0228\n\013DeleteTable\022\023" + + ".DeleteTableRequest\032\024.DeleteTableRespons", + "e\0228\n\013EnableTable\022\023.EnableTableRequest\032\024." + + "EnableTableResponse\022;\n\014DisableTable\022\024.Di" + + "sableTableRequest\032\025.DisableTableResponse" + + "\0228\n\013ModifyTable\022\023.ModifyTableRequest\032\024.M" + + "odifyTableResponse\0228\n\013CreateTable\022\023.Crea" + + "teTableRequest\032\024.CreateTableResponse\022/\n\010" + + "Shutdown\022\020.ShutdownRequest\032\021.ShutdownRes" + + "ponse\0225\n\nStopMaster\022\022.StopMasterRequest\032" + + "\023.StopMasterResponse\022,\n\007Balance\022\017.Balanc" + + "eRequest\032\020.BalanceResponse\022M\n\022SetBalance", + "rRunning\022\032.SetBalancerRunningRequest\032\033.S" + + "etBalancerRunningResponse\022A\n\016RunCatalogS" + + "can\022\026.RunCatalogScanRequest\032\027.RunCatalog" + + "ScanResponse\022S\n\024EnableCatalogJanitor\022\034.E" + + "nableCatalogJanitorRequest\032\035.EnableCatal" + + "ogJanitorResponse\022\\\n\027IsCatalogJanitorEna" + + "bled\022\037.IsCatalogJanitorEnabledRequest\032 ." + + "IsCatalogJanitorEnabledResponse\022L\n\021ExecM" + + "asterService\022\032.CoprocessorServiceRequest" + + "\032\033.CoprocessorServiceResponse\022/\n\010Snapsho", + "t\022\020.SnapshotRequest\032\021.SnapshotResponse\022V" + + "\n\025GetCompletedSnapshots\022\035.GetCompletedSn" + + "apshotsRequest\032\036.GetCompletedSnapshotsRe" + + "sponse\022A\n\016DeleteSnapshot\022\026.DeleteSnapsho" + + "tRequest\032\027.DeleteSnapshotResponse\022A\n\016IsS" + + "napshotDone\022\026.IsSnapshotDoneRequest\032\027.Is" + + "SnapshotDoneResponse\022D\n\017RestoreSnapshot\022" + + "\027.RestoreSnapshotRequest\032\030.RestoreSnapsh" + + "otResponse\022V\n\025IsRestoreSnapshotDone\022\035.Is" + + "RestoreSnapshotDoneRequest\032\036.IsRestoreSn", + "apshotDoneResponse\022D\n\017IsMasterRunning\022\027." 
+ + "IsMasterRunningRequest\032\030.IsMasterRunning" + + "Response\022D\n\017ModifyNamespace\022\027.ModifyName" + + "spaceRequest\032\030.ModifyNamespaceResponse\022D" + + "\n\017CreateNamespace\022\027.CreateNamespaceReque" + + "st\032\030.CreateNamespaceResponse\022D\n\017DeleteNa" + + "mespace\022\027.DeleteNamespaceRequest\032\030.Delet" + + "eNamespaceResponse\022Y\n\026GetNamespaceDescri" + + "ptor\022\036.GetNamespaceDescriptorRequest\032\037.G" + + "etNamespaceDescriptorResponse\022_\n\030ListNam", + "espaceDescriptors\022 .ListNamespaceDescrip" + + "torsRequest\032!.ListNamespaceDescriptorsRe" + + "sponse\022t\n\037ListTableDescriptorsByNamespac" + + "e\022\'.ListTableDescriptorsByNamespaceReque" + + "st\032(.ListTableDescriptorsByNamespaceResp" + + "onse\022b\n\031ListTableNamesByNamespace\022!.List" + + "TableNamesByNamespaceRequest\032\".ListTable" + + "NamesByNamespaceResponseBG\n*org.apache.h" + + "adoop.hbase.protobuf.generatedB\021MasterAd" + + "minProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -35732,24 +35733,24 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ModifyNamespaceRequest_descriptor, new java.lang.String[] { "NamespaceDescriptor", }); + internal_static_ModifyNamespaceResponse_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_ModifyNamespaceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ModifyNamespaceResponse_descriptor, + new java.lang.String[] { }); internal_static_GetNamespaceDescriptorRequest_descriptor = - getDescriptor().getMessageTypes().get(31); + getDescriptor().getMessageTypes().get(32); internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetNamespaceDescriptorRequest_descriptor, new java.lang.String[] { "NamespaceName", }); internal_static_GetNamespaceDescriptorResponse_descriptor = - getDescriptor().getMessageTypes().get(32); + getDescriptor().getMessageTypes().get(33); internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetNamespaceDescriptorResponse_descriptor, new java.lang.String[] { "NamespaceDescriptor", }); - internal_static_ModifyNamespaceResponse_descriptor = - getDescriptor().getMessageTypes().get(33); - internal_static_ModifyNamespaceResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ModifyNamespaceResponse_descriptor, - new java.lang.String[] { }); internal_static_ListNamespaceDescriptorsRequest_descriptor = getDescriptor().getMessageTypes().get(34); internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable = new @@ -35834,17 +35835,17 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SetBalancerRunningResponse_descriptor, new java.lang.String[] { "PrevBalanceValue", }); - internal_static_CatalogScanRequest_descriptor = + internal_static_RunCatalogScanRequest_descriptor = getDescriptor().getMessageTypes().get(48); - internal_static_CatalogScanRequest_fieldAccessorTable = new + internal_static_RunCatalogScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CatalogScanRequest_descriptor, + 
internal_static_RunCatalogScanRequest_descriptor, new java.lang.String[] { }); - internal_static_CatalogScanResponse_descriptor = + internal_static_RunCatalogScanResponse_descriptor = getDescriptor().getMessageTypes().get(49); - internal_static_CatalogScanResponse_fieldAccessorTable = new + internal_static_RunCatalogScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CatalogScanResponse_descriptor, + internal_static_RunCatalogScanResponse_descriptor, new java.lang.String[] { "ScanResult", }); internal_static_EnableCatalogJanitorRequest_descriptor = getDescriptor().getMessageTypes().get(50); @@ -35870,29 +35871,29 @@ com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_IsCatalogJanitorEnabledResponse_descriptor, new java.lang.String[] { "Value", }); - internal_static_TakeSnapshotRequest_descriptor = + internal_static_SnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(54); - internal_static_TakeSnapshotRequest_fieldAccessorTable = new + internal_static_SnapshotRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TakeSnapshotRequest_descriptor, + internal_static_SnapshotRequest_descriptor, new java.lang.String[] { "Snapshot", }); - internal_static_TakeSnapshotResponse_descriptor = + internal_static_SnapshotResponse_descriptor = getDescriptor().getMessageTypes().get(55); - internal_static_TakeSnapshotResponse_fieldAccessorTable = new + internal_static_SnapshotResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TakeSnapshotResponse_descriptor, + internal_static_SnapshotResponse_descriptor, new java.lang.String[] { "ExpectedTimeout", }); - internal_static_ListSnapshotRequest_descriptor = + internal_static_GetCompletedSnapshotsRequest_descriptor = getDescriptor().getMessageTypes().get(56); - internal_static_ListSnapshotRequest_fieldAccessorTable = new + internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ListSnapshotRequest_descriptor, + internal_static_GetCompletedSnapshotsRequest_descriptor, new java.lang.String[] { }); - internal_static_ListSnapshotResponse_descriptor = + internal_static_GetCompletedSnapshotsResponse_descriptor = getDescriptor().getMessageTypes().get(57); - internal_static_ListSnapshotResponse_fieldAccessorTable = new + internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ListSnapshotResponse_descriptor, + internal_static_GetCompletedSnapshotsResponse_descriptor, new java.lang.String[] { "Snapshots", }); internal_static_DeleteSnapshotRequest_descriptor = getDescriptor().getMessageTypes().get(58); Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java (working copy) @@ -731,7 +731,7 @@ descriptor; static { java.lang.String[] descriptorData = { - "\n\017MapReduce.proto\032\013hbase.proto\".\n\013ScanMe" + + "\n\017MapReduce.proto\032\013HBase.proto\".\n\013ScanMe" + "trics\022\037\n\007metrics\030\001 \003(\0132\016.NameInt64PairBB" + 
"\n*org.apache.hadoop.hbase.protobuf.gener" + "atedB\017MapReduceProtosH\001\240\001\001" Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java (revision 1522009) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java (working copy) @@ -8,7 +8,7 @@ public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } - public interface RowProcessorRequestOrBuilder + public interface ProcessRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string row_processor_class_name = 1; @@ -52,24 +52,24 @@ com.google.protobuf.ByteString getRowProcessorInitializerMessage(); } /** - * Protobuf type {@code RowProcessorRequest} + * Protobuf type {@code ProcessRequest} */ - public static final class RowProcessorRequest extends + public static final class ProcessRequest extends com.google.protobuf.GeneratedMessage - implements RowProcessorRequestOrBuilder { - // Use RowProcessorRequest.newBuilder() to construct. - private RowProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + implements ProcessRequestOrBuilder { + // Use ProcessRequest.newBuilder() to construct. + private ProcessRequest(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private RowProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private ProcessRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final RowProcessorRequest defaultInstance; - public static RowProcessorRequest getDefaultInstance() { + private static final ProcessRequest defaultInstance; + public static ProcessRequest getDefaultInstance() { return defaultInstance; } - public RowProcessorRequest getDefaultInstanceForType() { + public ProcessRequest getDefaultInstanceForType() { return defaultInstance; } @@ -79,7 +79,7 @@ getUnknownFields() { return this.unknownFields; } - private RowProcessorRequest( + private ProcessRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -131,28 +131,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowProcessorRequest parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ProcessRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new RowProcessorRequest(input, extensionRegistry); + return new ProcessRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -327,10 +327,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) obj; + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest) obj; boolean result = true; result = result && (hasRowProcessorClassName() == other.hasRowProcessorClassName()); @@ -378,53 +378,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public 
static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -433,7 +433,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -445,24 +445,24 @@ return builder; } /** - * Protobuf type {@code RowProcessorRequest} + * Protobuf type {@code ProcessRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequestOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class); + 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -497,23 +497,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest build() { - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest(this); + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -534,16 +534,16 @@ } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance()) return this; + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.getDefaultInstance()) return this; if (other.hasRowProcessorClassName()) { bitField0_ |= 
0x00000001; rowProcessorClassName_ = other.rowProcessorClassName_; @@ -573,11 +573,11 @@ com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -772,18 +772,18 @@ return this; } - // @@protoc_insertion_point(builder_scope:RowProcessorRequest) + // @@protoc_insertion_point(builder_scope:ProcessRequest) } static { - defaultInstance = new RowProcessorRequest(true); + defaultInstance = new ProcessRequest(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RowProcessorRequest) + // @@protoc_insertion_point(class_scope:ProcessRequest) } - public interface RowProcessorResultOrBuilder + public interface ProcessResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes row_processor_result = 1; @@ -797,24 +797,24 @@ com.google.protobuf.ByteString getRowProcessorResult(); } /** - * Protobuf type {@code RowProcessorResult} + * Protobuf type {@code ProcessResponse} */ - public static final class RowProcessorResult extends + public static final class ProcessResponse extends com.google.protobuf.GeneratedMessage - implements RowProcessorResultOrBuilder { - // Use RowProcessorResult.newBuilder() to construct. - private RowProcessorResult(com.google.protobuf.GeneratedMessage.Builder builder) { + implements ProcessResponseOrBuilder { + // Use ProcessResponse.newBuilder() to construct. 
+ private ProcessResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private RowProcessorResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private ProcessResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final RowProcessorResult defaultInstance; - public static RowProcessorResult getDefaultInstance() { + private static final ProcessResponse defaultInstance; + public static ProcessResponse getDefaultInstance() { return defaultInstance; } - public RowProcessorResult getDefaultInstanceForType() { + public ProcessResponse getDefaultInstanceForType() { return defaultInstance; } @@ -824,7 +824,7 @@ getUnknownFields() { return this.unknownFields; } - private RowProcessorResult( + private ProcessResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -866,28 +866,28 @@ } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowProcessorResult parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ProcessResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new RowProcessorResult(input, extensionRegistry); + return new ProcessResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -960,10 +960,10 @@ if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) obj; + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse) obj; boolean 
result = true; result = result && (hasRowProcessorResult() == other.hasRowProcessorResult()); @@ -993,53 +993,53 @@ return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1048,7 +1048,7 @@ public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1060,24 +1060,24 @@ return builder; } /** - * Protobuf type {@code RowProcessorResult} + * Protobuf type {@code ProcessResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResultOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1108,23 +1108,23 @@ public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult build() { - org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial(); + public 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult(this);
+      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -1137,16 +1137,16 @@
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance()) return this;
         if (other.hasRowProcessorResult()) {
           setRowProcessorResult(other.getRowProcessorResult());
         }
@@ -1166,11 +1166,11 @@
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parsedMessage = null;
+        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) e.getUnfinishedMessage();
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse) e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -1217,15 +1217,15 @@
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:RowProcessorResult)
+      // @@protoc_insertion_point(builder_scope:ProcessResponse)
     }
 
     static {
-      defaultInstance = new RowProcessorResult(true);
+      defaultInstance = new ProcessResponse(true);
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:RowProcessorResult)
+    // @@protoc_insertion_point(class_scope:ProcessResponse)
   }
 
   /**
@@ -1237,12 +1237,12 @@
 
     public interface Interface {
       /**
-       * rpc Process(.RowProcessorRequest) returns (.RowProcessorResult);
+       * rpc Process(.ProcessRequest) returns (.ProcessResponse);
        */
      public abstract void process(
          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse> done);
 
    }
 
@@ -1252,8 +1252,8 @@
         @java.lang.Override
         public  void process(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
-            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done) {
+            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse> done) {
           impl.process(controller, request, done);
         }
 
@@ -1280,7 +1280,7 @@
         }
         switch(method.getIndex()) {
           case 0:
-            return impl.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request);
+            return impl.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest)request);
           default:
             throw new java.lang.AssertionError("Can't get here.");
         }
@@ -1296,7 +1296,7 @@
         }
         switch(method.getIndex()) {
           case 0:
-            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance();
+            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.getDefaultInstance();
           default:
             throw new java.lang.AssertionError("Can't get here.");
         }
@@ -1312,7 +1312,7 @@
         }
         switch(method.getIndex()) {
           case 0:
-            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
+            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance();
           default:
             throw new java.lang.AssertionError("Can't get here.");
         }
@@ -1322,12 +1322,12 @@
     }
 
     /**
-     * rpc Process(.RowProcessorRequest) returns (.RowProcessorResult);
+     * rpc Process(.ProcessRequest) returns (.ProcessResponse);
      */
     public abstract void process(
        com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);
+        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse> done);
 
     public static final
         com.google.protobuf.Descriptors.ServiceDescriptor
@@ -1352,8 +1352,8 @@
       }
       switch(method.getIndex()) {
         case 0:
-          this.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request,
-            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult>specializeCallback(
+          this.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse>specializeCallback(
              done));
           return;
         default:
@@ -1371,7 +1371,7 @@
       }
       switch(method.getIndex()) {
         case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
       }
@@ -1387,7 +1387,7 @@
       }
       switch(method.getIndex()) {
         case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
       }
@@ -1411,17 +1411,17 @@
 
       public  void process(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done) {
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(0),
           controller,
           request,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(),
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance(),
           com.google.protobuf.RpcUtil.generalizeCallback(
             done,
-            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class,
-            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()));
+            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance()));
       }
     }
 
@@ -1431,9 +1431,9 @@
     }
 
     public interface BlockingInterface {
-      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
+      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse process(
          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request)
          throws com.google.protobuf.ServiceException;
     }
 
@@ -1444,15 +1444,15 @@
 
       private final com.google.protobuf.BlockingRpcChannel channel;
 
-      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
+      public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse process(
          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest request)
          throws com.google.protobuf.ServiceException {
-        return (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) channel.callBlockingMethod(
+        return (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
-          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance());
+          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.getDefaultInstance());
       }
     }
 
@@ -1461,15 +1461,15 @@
   }
 
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_RowProcessorRequest_descriptor;
+    internal_static_ProcessRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_RowProcessorRequest_fieldAccessorTable;
+      internal_static_ProcessRequest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_RowProcessorResult_descriptor;
+    internal_static_ProcessResponse_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_RowProcessorResult_fieldAccessorTable;
+      internal_static_ProcessResponse_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -1479,33 +1479,32 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\022RowProcessor.proto\"\222\001\n\023RowProcessorReq" +
-      "uest\022 \n\030row_processor_class_name\030\001 \002(\t\022." +
-      "\n&row_processor_initializer_message_name" +
-      "\030\002 \001(\t\022)\n!row_processor_initializer_mess" +
-      "age\030\003 \001(\014\"2\n\022RowProcessorResult\022\034\n\024row_p" +
-      "rocessor_result\030\001 \002(\0142K\n\023RowProcessorSer" +
-      "vice\0224\n\007Process\022\024.RowProcessorRequest\032\023." +
-      "RowProcessorResultBH\n*org.apache.hadoop." +
-      "hbase.protobuf.generatedB\022RowProcessorPr" +
-      "otosH\001\210\001\001\240\001\001"
+      "\n\022RowProcessor.proto\"\215\001\n\016ProcessRequest\022" +
+      " \n\030row_processor_class_name\030\001 \002(\t\022.\n&row" +
+      "_processor_initializer_message_name\030\002 \001(" +
+      "\t\022)\n!row_processor_initializer_message\030\003" +
+      " \001(\014\"/\n\017ProcessResponse\022\034\n\024row_processor" +
+      "_result\030\001 \002(\0142C\n\023RowProcessorService\022,\n\007" +
+      "Process\022\017.ProcessRequest\032\020.ProcessRespon" +
+      "seBH\n*org.apache.hadoop.hbase.protobuf.g" +
+      "eneratedB\022RowProcessorProtosH\001\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
-          internal_static_RowProcessorRequest_descriptor =
+          internal_static_ProcessRequest_descriptor =
            getDescriptor().getMessageTypes().get(0);
-          internal_static_RowProcessorRequest_fieldAccessorTable = new
+          internal_static_ProcessRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_RowProcessorRequest_descriptor,
+              internal_static_ProcessRequest_descriptor,
              new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", });
-          internal_static_RowProcessorResult_descriptor =
+          internal_static_ProcessResponse_descriptor =
            getDescriptor().getMessageTypes().get(1);
-          internal_static_RowProcessorResult_fieldAccessorTable = new
+          internal_static_ProcessResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_RowProcessorResult_descriptor,
+              internal_static_ProcessResponse_descriptor,
              new java.lang.String[] { "RowProcessorResult", });
          return null;
        }
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java	(working copy)
@@ -5496,7 +5496,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\030RegionServerStatus.proto\032\013hbase.proto\032" +
+      "\n\030RegionServerStatus.proto\032\013HBase.proto\032" +
       "\023ClusterStatus.proto\"b\n\032RegionServerStar" +
       "tupRequest\022\014\n\004port\030\001 \002(\r\022\031\n\021server_start" +
       "_code\030\002 \002(\004\022\033\n\023server_current_time\030\003 \002(\004" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java	(working copy)
@@ -27611,7 +27611,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\014Client.proto\032\013hbase.proto\032\014Filter.prot" +
+      "\n\014Client.proto\032\013HBase.proto\032\014Filter.prot" +
       "o\032\nCell.proto\032\020Comparator.proto\"+\n\006Colum" +
       "n\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\347\001\n" +
       "\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Colu" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java	(working copy)
@@ -2323,32 +2323,32 @@
     // @@protoc_insertion_point(class_scope:Token)
   }
 
-  public interface TokenRequestOrBuilder
+  public interface GetAuthenticationTokenRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
   }
   /**
-   * Protobuf type {@code TokenRequest}
+   * Protobuf type {@code GetAuthenticationTokenRequest}
    *
    * <pre>
    * RPC request &amp; response messages
    * </pre>
   */
-  public static final class TokenRequest extends
+  public static final class GetAuthenticationTokenRequest extends
      com.google.protobuf.GeneratedMessage
-      implements TokenRequestOrBuilder {
-    // Use TokenRequest.newBuilder() to construct.
-    private TokenRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      implements GetAuthenticationTokenRequestOrBuilder {
+    // Use GetAuthenticationTokenRequest.newBuilder() to construct.
+    private GetAuthenticationTokenRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
       this.unknownFields = builder.getUnknownFields();
     }
-    private TokenRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private GetAuthenticationTokenRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-    private static final TokenRequest defaultInstance;
-    public static TokenRequest getDefaultInstance() {
+    private static final GetAuthenticationTokenRequest defaultInstance;
+    public static GetAuthenticationTokenRequest getDefaultInstance() {
       return defaultInstance;
     }
 
-    public TokenRequest getDefaultInstanceForType() {
+    public GetAuthenticationTokenRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
 
@@ -2358,7 +2358,7 @@
         getUnknownFields() {
       return this.unknownFields;
     }
-    private TokenRequest(
+    private GetAuthenticationTokenRequest(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -2394,28 +2394,28 @@
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.Builder.class);
+              org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<TokenRequest> PARSER =
-        new com.google.protobuf.AbstractParser<TokenRequest>() {
-      public TokenRequest parsePartialFrom(
+    public static com.google.protobuf.Parser<GetAuthenticationTokenRequest> PARSER =
+        new com.google.protobuf.AbstractParser<GetAuthenticationTokenRequest>() {
+      public GetAuthenticationTokenRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TokenRequest(input, extensionRegistry);
+        return new GetAuthenticationTokenRequest(input, extensionRegistry);
       }
     };
 
     @java.lang.Override
-    public com.google.protobuf.Parser<TokenRequest> getParserForType() {
+    public com.google.protobuf.Parser<GetAuthenticationTokenRequest> getParserForType() {
       return PARSER;
     }
 
@@ -2459,10 +2459,10 @@
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest)) {
         return super.equals(obj);
       }
-      org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest) obj;
 
       boolean result = true;
       result = result &&
@@ -2483,53 +2483,53 @@
       return hash;
     }
 
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
@@ -2538,7 +2538,7 @@
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -2550,7 +2550,7 @@
       return builder;
     }
     /**
-     * Protobuf type {@code TokenRequest}
+     * Protobuf type {@code GetAuthenticationTokenRequest}
      *
      * <pre>
      * RPC request &amp; response messages
      * </pre>
@@ -2558,20 +2558,20 @@
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequestOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.Builder.class);
+                org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.Builder.class);
       }
 
-      // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -2600,38 +2600,38 @@
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest build() {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest(this);
         onBuilt();
         return result;
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance()) return this;
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -2644,11 +2644,11 @@
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest parsedMessage = null;
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest) e.getUnfinishedMessage();
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest) e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -2658,18 +2658,18 @@
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:TokenRequest)
+      // @@protoc_insertion_point(builder_scope:GetAuthenticationTokenRequest)
     }
 
     static {
-      defaultInstance = new TokenRequest(true);
+      defaultInstance = new GetAuthenticationTokenRequest(true);
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:TokenRequest)
+    // @@protoc_insertion_point(class_scope:GetAuthenticationTokenRequest)
   }
 
-  public interface TokenResponseOrBuilder
+  public interface GetAuthenticationTokenResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
     // optional .Token token = 1;
@@ -2687,24 +2687,24 @@
     org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder();
   }
   /**
-   * Protobuf type {@code TokenResponse}
+   * Protobuf type {@code GetAuthenticationTokenResponse}
    */
-  public static final class TokenResponse extends
+  public static final class GetAuthenticationTokenResponse extends
       com.google.protobuf.GeneratedMessage
-      implements TokenResponseOrBuilder {
-    // Use TokenResponse.newBuilder() to construct.
-    private TokenResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      implements GetAuthenticationTokenResponseOrBuilder {
+    // Use GetAuthenticationTokenResponse.newBuilder() to construct.
+    private GetAuthenticationTokenResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
       this.unknownFields = builder.getUnknownFields();
     }
-    private TokenResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private GetAuthenticationTokenResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-    private static final TokenResponse defaultInstance;
-    public static TokenResponse getDefaultInstance() {
+    private static final GetAuthenticationTokenResponse defaultInstance;
+    public static GetAuthenticationTokenResponse getDefaultInstance() {
       return defaultInstance;
     }
 
-    public TokenResponse getDefaultInstanceForType() {
+    public GetAuthenticationTokenResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
 
@@ -2714,7 +2714,7 @@
         getUnknownFields() {
       return this.unknownFields;
     }
-    private TokenResponse(
+    private GetAuthenticationTokenResponse(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -2764,28 +2764,28 @@
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.Builder.class);
+              org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<TokenResponse> PARSER =
-        new com.google.protobuf.AbstractParser<TokenResponse>() {
-      public TokenResponse parsePartialFrom(
+    public static com.google.protobuf.Parser<GetAuthenticationTokenResponse> PARSER =
+        new com.google.protobuf.AbstractParser<GetAuthenticationTokenResponse>() {
+      public GetAuthenticationTokenResponse parsePartialFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TokenResponse(input, extensionRegistry);
+        return new GetAuthenticationTokenResponse(input, extensionRegistry);
       }
     };
 
     @java.lang.Override
-    public com.google.protobuf.Parser<TokenResponse> getParserForType() {
+    public com.google.protobuf.Parser<GetAuthenticationTokenResponse> getParserForType() {
       return PARSER;
     }
 
@@ -2860,10 +2860,10 @@
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse)) {
         return super.equals(obj);
       }
-      org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse other = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse) obj;
 
       boolean result = true;
       result = result && (hasToken() == other.hasToken());
@@ -2893,53 +2893,53 @@
       return hash;
     }
 
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2948,7 +2948,7 @@
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -2960,24 +2960,24 @@
       return builder;
     }
     /**
-     * Protobuf type {@code TokenResponse}
+     * Protobuf type {@code GetAuthenticationTokenResponse}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponseOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponseOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.Builder.class);
+                org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.Builder.class);
       }
 
-      // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -3013,23 +3013,23 @@
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse build() {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse build() {
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse result = new org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -3046,16 +3046,16 @@
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance()) return this;
         if (other.hasToken()) {
           mergeToken(other.getToken());
         }
@@ -3071,11 +3071,11 @@
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse parsedMessage = null;
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) e.getUnfinishedMessage();
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse) e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -3203,15 +3203,15 @@
         return tokenBuilder_;
       }
 
-      // @@protoc_insertion_point(builder_scope:TokenResponse)
+      // @@protoc_insertion_point(builder_scope:GetAuthenticationTokenResponse)
     }
 
     static {
-      defaultInstance = new TokenResponse(true);
+      defaultInstance = new GetAuthenticationTokenResponse(true);
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:TokenResponse)
+    // @@protoc_insertion_point(class_scope:GetAuthenticationTokenResponse)
   }
 
   public interface WhoAmIRequestOrBuilder
@@ -4239,12 +4239,12 @@
 
     public interface Interface {
       /**
-       * rpc GetAuthenticationToken(.TokenRequest) returns (.TokenResponse);
+       * rpc GetAuthenticationToken(.GetAuthenticationTokenRequest) returns (.GetAuthenticationTokenResponse);
        */
       public abstract void getAuthenticationToken(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done);
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done);
 
       /**
        * rpc WhoAmI(.WhoAmIRequest) returns (.WhoAmIResponse);
@@ -4262,8 +4262,8 @@
         @java.lang.Override
         public  void getAuthenticationToken(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
-            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done) {
+            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done) {
           impl.getAuthenticationToken(controller, request, done);
         }
 
@@ -4298,7 +4298,7 @@
           }
           switch(method.getIndex()) {
             case 0:
-              return impl.getAuthenticationToken(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)request);
+              return impl.getAuthenticationToken(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest)request);
             case 1:
               return impl.whoAmI(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest)request);
             default:
@@ -4316,7 +4316,7 @@
           }
           switch(method.getIndex()) {
             case 0:
-              return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance();
             case 1:
               return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.getDefaultInstance();
             default:
@@ -4334,7 +4334,7 @@
           }
           switch(method.getIndex()) {
             case 0:
-              return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance();
             case 1:
               return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance();
             default:
@@ -4346,12 +4346,12 @@
     }
 
     /**
-     * rpc GetAuthenticationToken(.TokenRequest) returns (.TokenResponse);
+     * rpc GetAuthenticationToken(.GetAuthenticationTokenRequest) returns (.GetAuthenticationTokenResponse);
      */
     public abstract void getAuthenticationToken(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done);
+        org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done);
 
     /**
      * rpc WhoAmI(.WhoAmIRequest) returns (.WhoAmIResponse);
@@ -4384,8 +4384,8 @@
       }
       switch(method.getIndex()) {
         case 0:
-          this.getAuthenticationToken(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest)request,
-            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse>specializeCallback(
+          this.getAuthenticationToken(controller, (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse>specializeCallback(
               done));
           return;
         case 1:
@@ -4408,7 +4408,7 @@
       }
       switch(method.getIndex()) {
         case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance();
         case 1:
           return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.getDefaultInstance();
         default:
@@ -4426,7 +4426,7 @@
       }
       switch(method.getIndex()) {
         case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance();
         case 1:
           return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.getDefaultInstance();
         default:
@@ -4452,17 +4452,17 @@
 
       public  void getAuthenticationToken(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse> done) {
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(0),
           controller,
           request,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance(),
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance(),
           com.google.protobuf.RpcUtil.generalizeCallback(
             done,
-            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.class,
-            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance()));
+            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance()));
       }
 
       public  void whoAmI(
@@ -4487,9 +4487,9 @@
     }
 
     public interface BlockingInterface {
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getAuthenticationToken(
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse getAuthenticationToken(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request)
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse whoAmI(
@@ -4505,15 +4505,15 @@
 
       private final com.google.protobuf.BlockingRpcChannel channel;
 
-      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse getAuthenticationToken(
+      public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse getAuthenticationToken(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenRequest request)
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest request)
           throws com.google.protobuf.ServiceException {
-        return (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse) channel.callBlockingMethod(
+        return (org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(0),
           controller,
           request,
-          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenResponse.getDefaultInstance());
+          org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance());
       }
 
 
@@ -4549,15 +4549,15 @@
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_Token_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_TokenRequest_descriptor;
+    internal_static_GetAuthenticationTokenRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_TokenRequest_fieldAccessorTable;
+      internal_static_GetAuthenticationTokenRequest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_TokenResponse_descriptor;
+    internal_static_GetAuthenticationTokenResponse_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_TokenResponse_fieldAccessorTable;
+      internal_static_GetAuthenticationTokenResponse_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_WhoAmIRequest_descriptor;
   private static
@@ -4585,15 +4585,17 @@
       " \001(\003\022\027\n\017expiration_date\030\005 \001(\003\022\027\n\017sequenc" +
       "e_number\030\006 \001(\003\"\034\n\004Kind\022\024\n\020HBASE_AUTH_TOK" +
       "EN\020\000\">\n\005Token\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010pas" +
-      "sword\030\002 \001(\014\022\017\n\007service\030\003 \001(\014\"\016\n\014TokenReq" +
-      "uest\"&\n\rTokenResponse\022\025\n\005token\030\001 \001(\0132\006.T",
-      "oken\"\017\n\rWhoAmIRequest\"7\n\016WhoAmIResponse\022" +
-      "\020\n\010username\030\001 \001(\t\022\023\n\013auth_method\030\002 \001(\t2{" +
-      "\n\025AuthenticationService\0227\n\026GetAuthentica" +
-      "tionToken\022\r.TokenRequest\032\016.TokenResponse" +
-      "\022)\n\006WhoAmI\022\016.WhoAmIRequest\032\017.WhoAmIRespo" +
-      "nseBJ\n*org.apache.hadoop.hbase.protobuf." +
-      "generatedB\024AuthenticationProtosH\001\210\001\001\240\001\001"
+      "sword\030\002 \001(\014\022\017\n\007service\030\003 \001(\014\"\037\n\035GetAuthe" +
+      "nticationTokenRequest\"7\n\036GetAuthenticati",
+      "onTokenResponse\022\025\n\005token\030\001 \001(\0132\006.Token\"\017" +
+      "\n\rWhoAmIRequest\"7\n\016WhoAmIResponse\022\020\n\010use" +
+      "rname\030\001 \001(\t\022\023\n\013auth_method\030\002 \001(\t2\235\001\n\025Aut" +
+      "henticationService\022Y\n\026GetAuthenticationT" +
+      "oken\022\036.GetAuthenticationTokenRequest\032\037.G" +
+      "etAuthenticationTokenResponse\022)\n\006WhoAmI\022" +
+      "\016.WhoAmIRequest\032\017.WhoAmIResponseBJ\n*org." +
+      "apache.hadoop.hbase.protobuf.generatedB\024" +
+      "AuthenticationProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -4618,17 +4620,17 @@
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_Token_descriptor,
               new java.lang.String[] { "Identifier", "Password", "Service", });
-          internal_static_TokenRequest_descriptor =
+          internal_static_GetAuthenticationTokenRequest_descriptor =
             getDescriptor().getMessageTypes().get(3);
-          internal_static_TokenRequest_fieldAccessorTable = new
+          internal_static_GetAuthenticationTokenRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_TokenRequest_descriptor,
+              internal_static_GetAuthenticationTokenRequest_descriptor,
               new java.lang.String[] { });
-          internal_static_TokenResponse_descriptor =
+          internal_static_GetAuthenticationTokenResponse_descriptor =
             getDescriptor().getMessageTypes().get(4);
-          internal_static_TokenResponse_fieldAccessorTable = new
+          internal_static_GetAuthenticationTokenResponse_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_TokenResponse_descriptor,
+              internal_static_GetAuthenticationTokenResponse_descriptor,
               new java.lang.String[] { "Token", });
           internal_static_WhoAmIRequest_descriptor =
             getDescriptor().getMessageTypes().get(5);
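[Note: the escaped descriptorData strings above are the file's serialized FileDescriptorProto, one byte per ISO-8859-1 character, which is why the rename surfaces as reshuffled octal escapes — the embedded length prefix \014 (12, "TokenRequest") becomes \035 (29, "GetAuthenticationTokenRequest"). A hedged sketch of decoding the block back into readable names; String.join is used purely for illustration:

    byte[] raw = String.join("", descriptorData)
        .getBytes(java.nio.charset.StandardCharsets.ISO_8859_1);
    com.google.protobuf.DescriptorProtos.FileDescriptorProto fdp =
        com.google.protobuf.DescriptorProtos.FileDescriptorProto.parseFrom(raw);
    // prints e.g. Token, GetAuthenticationTokenRequest, GetAuthenticationTokenResponse, ...
    for (com.google.protobuf.DescriptorProtos.DescriptorProto m : fdp.getMessageTypeList()) {
      System.out.println(m.getName());
    }
]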
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/Tracing.java	(working copy)
@@ -1,591 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: Tracing.proto
-
-package org.apache.hadoop.hbase.protobuf.generated;
-
-public final class Tracing {
-  private Tracing() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface RPCTInfoOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // optional int64 trace_id = 1;
-    /**
-     * optional int64 trace_id = 1;
-     */
-    boolean hasTraceId();
-    /**
-     * optional int64 trace_id = 1;
-     */
-    long getTraceId();
-
-    // optional int64 parent_id = 2;
-    /**
-     * optional int64 parent_id = 2;
-     */
-    boolean hasParentId();
-    /**
-     * optional int64 parent_id = 2;
-     */
-    long getParentId();
-  }
-  /**
-   * Protobuf type {@code RPCTInfo}
-   *
-   * 
-   *Used to pass through the information necessary to continue
-   *a trace after an RPC is made. All we need is the traceid 
-   *(so we know the overarching trace this message is a part of), and
-   *the id of the current span when this message was sent, so we know 
-   *what span caused the new span we will create when this message is received.
-   * 
-   */
-  public static final class RPCTInfo extends
-      com.google.protobuf.GeneratedMessage
-      implements RPCTInfoOrBuilder {
-    // Use RPCTInfo.newBuilder() to construct.
-    private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final RPCTInfo defaultInstance;
-    public static RPCTInfo getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public RPCTInfo getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private RPCTInfo(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              traceId_ = input.readInt64();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              parentId_ = input.readInt64();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<RPCTInfo> PARSER =
-        new com.google.protobuf.AbstractParser<RPCTInfo>() {
-      public RPCTInfo parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new RPCTInfo(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<RPCTInfo> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // optional int64 trace_id = 1;
-    public static final int TRACE_ID_FIELD_NUMBER = 1;
-    private long traceId_;
-    /**
-     * optional int64 trace_id = 1;
-     */
-    public boolean hasTraceId() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * optional int64 trace_id = 1;
-     */
-    public long getTraceId() {
-      return traceId_;
-    }
-
-    // optional int64 parent_id = 2;
-    public static final int PARENT_ID_FIELD_NUMBER = 2;
-    private long parentId_;
-    /**
-     * optional int64 parent_id = 2;
-     */
-    public boolean hasParentId() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * optional int64 parent_id = 2;
-     */
-    public long getParentId() {
-      return parentId_;
-    }
-
-    private void initFields() {
-      traceId_ = 0L;
-      parentId_ = 0L;
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeInt64(1, traceId_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeInt64(2, parentId_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(1, traceId_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(2, parentId_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj;
-
-      boolean result = true;
-      result = result && (hasTraceId() == other.hasTraceId());
-      if (hasTraceId()) {
-        result = result && (getTraceId()
-            == other.getTraceId());
-      }
-      result = result && (hasParentId() == other.hasParentId());
-      if (hasParentId()) {
-        result = result && (getParentId()
-            == other.getParentId());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    private int memoizedHashCode = 0;
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasTraceId()) {
-        hash = (37 * hash) + TRACE_ID_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getTraceId());
-      }
-      if (hasParentId()) {
-        hash = (37 * hash) + PARENT_ID_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getParentId());
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * Protobuf type {@code RPCTInfo}
-     *
-     * 
-     *Used to pass through the information necessary to continue
-     *a trace after an RPC is made. All we need is the traceid 
-     *(so we know the overarching trace this message is a part of), and
-     *the id of the current span when this message was sent, so we know 
-     *what span caused the new span we will create when this message is received.
-     * 
-     */
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        traceId_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        parentId_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() {
-        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.traceId_ = traceId_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.parentId_ = parentId_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this;
-        if (other.hasTraceId()) {
-          setTraceId(other.getTraceId());
-        }
-        if (other.hasParentId()) {
-          setParentId(other.getParentId());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // optional int64 trace_id = 1;
-      private long traceId_ ;
-      /**
-       * optional int64 trace_id = 1;
-       */
-      public boolean hasTraceId() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * optional int64 trace_id = 1;
-       */
-      public long getTraceId() {
-        return traceId_;
-      }
-      /**
-       * optional int64 trace_id = 1;
-       */
-      public Builder setTraceId(long value) {
-        bitField0_ |= 0x00000001;
-        traceId_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * optional int64 trace_id = 1;
-       */
-      public Builder clearTraceId() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        traceId_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // optional int64 parent_id = 2;
-      private long parentId_ ;
-      /**
-       * optional int64 parent_id = 2;
-       */
-      public boolean hasParentId() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * optional int64 parent_id = 2;
-       */
-      public long getParentId() {
-        return parentId_;
-      }
-      /**
-       * optional int64 parent_id = 2;
-       */
-      public Builder setParentId(long value) {
-        bitField0_ |= 0x00000002;
-        parentId_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * optional int64 parent_id = 2;
-       */
-      public Builder clearParentId() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        parentId_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:RPCTInfo)
-    }
-
-    static {
-      defaultInstance = new RPCTInfo(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:RPCTInfo)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_RPCTInfo_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_RPCTInfo_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\rTracing.proto\"/\n\010RPCTInfo\022\020\n\010trace_id\030" +
-      "\001 \001(\003\022\021\n\tparent_id\030\002 \001(\003B:\n*org.apache.h" +
-      "adoop.hbase.protobuf.generatedB\007TracingH" +
-      "\001\240\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_RPCTInfo_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_RPCTInfo_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_RPCTInfo_descriptor,
-              new java.lang.String[] { "TraceId", "ParentId", });
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java	(working copy)
@@ -1,5 +1,5 @@
 // Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: hbase.proto
+// source: HBase.proto
 
 package org.apache.hadoop.hbase.protobuf.generated;
 
@@ -14057,7 +14057,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\013hbase.proto\032\nCell.proto\"1\n\tTableName\022\021" +
+      "\n\013HBase.proto\032\nCell.proto\"1\n\tTableName\022\021" +
       "\n\tnamespace\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\"\250\001\n" +
       "\013TableSchema\022\036\n\ntable_name\030\001 \001(\0132\n.Table" +
       "Name\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPai" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java	(working copy)
@@ -4811,7 +4811,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\tWAL.proto\032\013hbase.proto\"$\n\tWALHeader\022\027\n" +
+      "\n\tWAL.proto\032\013HBase.proto\"$\n\tWALHeader\022\027\n" +
       "\017has_compression\030\001 \001(\010\"\337\001\n\006WALKey\022\033\n\023enc" +
       "oded_region_name\030\001 \002(\014\022\022\n\ntable_name\030\002 \002" +
       "(\014\022\033\n\023log_sequence_number\030\003 \002(\004\022\022\n\nwrite" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java	(working copy)
@@ -3598,11 +3598,11 @@
     /**
      * optional .RPCTInfo trace_info = 2;
      */
-    org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo();
+    org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo();
     /**
      * optional .RPCTInfo trace_info = 2;
      */
-    org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder();
+    org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder();
 
     // optional string method_name = 3;
     /**
@@ -3724,11 +3724,11 @@
             break;
           }
           case 18: {
-            org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = null;
+            org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder subBuilder = null;
             if (((bitField0_ & 0x00000002) == 0x00000002)) {
               subBuilder = traceInfo_.toBuilder();
             }
-            traceInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.PARSER, extensionRegistry);
+            traceInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.PARSER, extensionRegistry);
             if (subBuilder != null) {
               subBuilder.mergeFrom(traceInfo_);
               traceInfo_ = subBuilder.buildPartial();
@@ -3825,7 +3825,7 @@
 
     // optional .RPCTInfo trace_info = 2;
     public static final int TRACE_INFO_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_;
+    private org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo traceInfo_;
     /**
      * optional .RPCTInfo trace_info = 2;
      */
@@ -3835,13 +3835,13 @@
     /**
      * optional .RPCTInfo trace_info = 2;
      */
-    public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() {
+    public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() {
       return traceInfo_;
     }
     /**
      * optional .RPCTInfo trace_info = 2;
      */
-    public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() {
+    public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() {
       return traceInfo_;
     }
 
@@ -3948,7 +3948,7 @@
 
     private void initFields() {
       callId_ = 0;
-      traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
+      traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance();
       methodName_ = "";
       requestParam_ = false;
       cellBlockMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance();
@@ -4208,7 +4208,7 @@
         callId_ = 0;
         bitField0_ = (bitField0_ & ~0x00000001);
         if (traceInfoBuilder_ == null) {
-          traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
+          traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance();
         } else {
           traceInfoBuilder_.clear();
         }
@@ -4389,9 +4389,9 @@
       }
 
       // optional .RPCTInfo trace_info = 2;
-      private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
+      private org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> traceInfoBuilder_;
+          org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> traceInfoBuilder_;
       /**
        * optional .RPCTInfo trace_info = 2;
        */
@@ -4401,7 +4401,7 @@
       /**
        * optional .RPCTInfo trace_info = 2;
        */
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() {
+      public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getTraceInfo() {
        if (traceInfoBuilder_ == null) {
          return traceInfo_;
        } else {
@@ -4411,7 +4411,7 @@
       /**
        * optional .RPCTInfo trace_info = 2;
        */
-      public Builder setTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) {
+      public Builder setTraceInfo(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo value) {
        if (traceInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
@@ -4428,7 +4428,7 @@
        * optional .RPCTInfo trace_info = 2;
        */
       public Builder setTraceInfo(
-          org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) {
+          org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder builderForValue) {
        if (traceInfoBuilder_ == null) {
          traceInfo_ = builderForValue.build();
          onChanged();
@@ -4441,12 +4441,12 @@
       /**
        * optional .RPCTInfo trace_info = 2;
        */
-      public Builder mergeTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) {
+      public Builder mergeTraceInfo(org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo value) {
        if (traceInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
-              traceInfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) {
+              traceInfo_ != org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance()) {
            traceInfo_ =
-              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial();
          } else {
            traceInfo_ = value;
          }
@@ -4462,7 +4462,7 @@
        */
       public Builder clearTraceInfo() {
         if (traceInfoBuilder_ == null) {
-          traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
+          traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.getDefaultInstance();
           onChanged();
         } else {
           traceInfoBuilder_.clear();
@@ -4473,7 +4473,7 @@
       /**
        * optional .RPCTInfo trace_info = 2;
        */
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTraceInfoBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder getTraceInfoBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return getTraceInfoFieldBuilder().getBuilder();
@@ -4481,7 +4481,7 @@
       /**
        * optional .RPCTInfo trace_info = 2;
        */
-      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder getTraceInfoOrBuilder() {
         if (traceInfoBuilder_ != null) {
           return traceInfoBuilder_.getMessageOrBuilder();
         } else {
@@ -4492,11 +4492,11 @@
        * optional .RPCTInfo trace_info = 2;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> 
+          org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder> 
           getTraceInfoFieldBuilder() {
         if (traceInfoBuilder_ == null) {
           traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>(
+              org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder>(
                   traceInfo_,
                   getParentForChildren(),
                   isClean());
@@ -5787,7 +5787,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\tRPC.proto\032\rTracing.proto\032\013hbase.proto\"" +
+      "\n\tRPC.proto\032\rTracing.proto\032\013HBase.proto\"" +
       "<\n\017UserInformation\022\026\n\016effective_user\030\001 \002" +
       "(\t\022\021\n\treal_user\030\002 \001(\t\"\222\001\n\020ConnectionHead" +
       "er\022#\n\tuser_info\030\001 \001(\0132\020.UserInformation\022" +
@@ -5854,7 +5854,7 @@
     com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
-          org.apache.hadoop.hbase.protobuf.generated.Tracing.getDescriptor(),
+          org.apache.hadoop.hbase.protobuf.generated.TracingProtos.getDescriptor(),
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
         }, assigner);
   }
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java	(working copy)
@@ -15687,7 +15687,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\014Filter.proto\032\013hbase.proto\032\020Comparator." +
+      "\n\014Filter.proto\032\013HBase.proto\032\020Comparator." +
       "proto\"1\n\006Filter\022\014\n\004name\030\001 \002(\t\022\031\n\021seriali" +
       "zed_filter\030\002 \001(\014\"%\n\024ColumnCountGetFilter" +
       "\022\r\n\005limit\030\001 \002(\005\"N\n\026ColumnPaginationFilte" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java	(working copy)
@@ -10302,7 +10302,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\023ClusterStatus.proto\032\013hbase.proto\032\017Clus" +
+      "\n\023ClusterStatus.proto\032\013HBase.proto\032\017Clus" +
      "terId.proto\032\010FS.proto\"\243\002\n\013RegionState\022 \n" +
      "\013region_info\030\001 \002(\0132\013.RegionInfo\022!\n\005state" +
      "\030\002 \002(\0162\022.RegionState.State\022\r\n\005stamp\030\003 \001(" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java	(working copy)
@@ -8,7 +8,7 @@
   public static void registerAllExtensions(
       com.google.protobuf.ExtensionRegistry registry) {
   }
-  public interface AggregateArgumentOrBuilder
+  public interface AggregateRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
     // required string interpreter_class_name = 1;
@@ -16,7 +16,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
@@ -28,7 +28,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
@@ -40,7 +40,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
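[Note: as the comment above spells out, the caller supplies three pieces. A hedged construction sketch — the builder method names are inferred from the field comments, while ProtobufUtil.toScan, the LongColumnInterpreter class name, and the in-scope scan are assumptions drawn from the wider HBase client, not from this patch:

    org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request =
        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.newBuilder()
            // (1) the (canonical) classname of the ColumnInterpreter implementation
            .setInterpreterClassName(
                "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
            // (2) the Scan query
            .setScan(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toScan(scan))
            // (3) any bytes required to construct the ColumnInterpreter object
            .setInterpreterSpecificBytes(com.google.protobuf.ByteString.EMPTY)
            .build();
]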
@@ -75,24 +75,24 @@
     com.google.protobuf.ByteString getInterpreterSpecificBytes();
   }
   /**
-   * Protobuf type {@code AggregateArgument}
+   * Protobuf type {@code AggregateRequest}
    */
-  public static final class AggregateArgument extends
+  public static final class AggregateRequest extends
       com.google.protobuf.GeneratedMessage
-      implements AggregateArgumentOrBuilder {
-    // Use AggregateArgument.newBuilder() to construct.
-    private AggregateArgument(com.google.protobuf.GeneratedMessage.Builder builder) {
+      implements AggregateRequestOrBuilder {
+    // Use AggregateRequest.newBuilder() to construct.
+    private AggregateRequest(com.google.protobuf.GeneratedMessage.Builder builder) {
       super(builder);
       this.unknownFields = builder.getUnknownFields();
     }
-    private AggregateArgument(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private AggregateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-    private static final AggregateArgument defaultInstance;
-    public static AggregateArgument getDefaultInstance() {
+    private static final AggregateRequest defaultInstance;
+    public static AggregateRequest getDefaultInstance() {
       return defaultInstance;
     }
 
-    public AggregateArgument getDefaultInstanceForType() {
+    public AggregateRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
 
@@ -102,7 +102,7 @@
         getUnknownFields() {
       return this.unknownFields;
     }
-    private AggregateArgument(
+    private AggregateRequest(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -162,28 +162,28 @@
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class);
+              org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<AggregateArgument> PARSER =
-        new com.google.protobuf.AbstractParser<AggregateArgument>() {
-      public AggregateArgument parsePartialFrom(
+    public static com.google.protobuf.Parser<AggregateRequest> PARSER =
+        new com.google.protobuf.AbstractParser<AggregateRequest>() {
+      public AggregateRequest parsePartialFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return new AggregateArgument(input, extensionRegistry);
+        return new AggregateRequest(input, extensionRegistry);
       }
     };
 
     @java.lang.Override
-    public com.google.protobuf.Parser<AggregateArgument> getParserForType() {
+    public com.google.protobuf.Parser<AggregateRequest> getParserForType() {
       return PARSER;
     }
 
@@ -195,7 +195,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
@@ -209,7 +209,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
@@ -234,7 +234,7 @@
      * required string interpreter_class_name = 1;
      *
      * 
-     ** The argument passed to the AggregateService consists of three parts
+     ** The request passed to the AggregateService consists of three parts
      *  (1) the (canonical) classname of the ColumnInterpreter implementation
      *  (2) the Scan query
      *  (3) any bytes required to construct the ColumnInterpreter object
@@ -369,10 +369,10 @@
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
         return super.equals(obj);
       }
-      org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) obj;
+      org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
 
       boolean result = true;
       result = result && (hasInterpreterClassName() == other.hasInterpreterClassName());
@@ -420,53 +420,53 @@
       return hash;
     }
 
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -475,7 +475,7 @@
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
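[Note: only the generated Java type and descriptor names change in this patch; every field keeps its number and wire type, so the rename is wire-compatible at the message level. A sketch, where oldBytes stands for a payload serialized by the former AggregateArgument type:

    org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest req =
        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.parseFrom(oldBytes);
]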
@@ -487,24 +487,24 @@
       return builder;
     }
     /**
-     * Protobuf type {@code AggregateArgument}
+     * Protobuf type {@code AggregateRequest}
      */
     public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgumentOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.Builder.class);
+                org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
       }
 
-      // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -544,23 +544,23 @@
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateArgument_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument build() {
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument(this);
+      public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -585,16 +585,16 @@
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance()) return this;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) return this;
         if (other.hasInterpreterClassName()) {
           bitField0_ |= 0x00000001;
           interpreterClassName_ = other.interpreterClassName_;
@@ -630,11 +630,11 @@
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument parsedMessage = null;
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument) e.getUnfinishedMessage();
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -651,7 +651,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -665,7 +665,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -687,7 +687,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -711,7 +711,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -732,7 +732,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -749,7 +749,7 @@
        * required string interpreter_class_name = 1;
        *
        * 
-       ** The argument passed to the AggregateService consists of three parts
+       ** The request passed to the AggregateService consists of three parts
        *  (1) the (canonical) classname of the ColumnInterpreter implementation
        *  (2) the Scan query
        *  (3) any bytes required to construct the ColumnInterpreter object
@@ -920,15 +920,15 @@
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:AggregateArgument)
+      // @@protoc_insertion_point(builder_scope:AggregateRequest)
     }
 
     static {
-      defaultInstance = new AggregateArgument(true);
+      defaultInstance = new AggregateRequest(true);
       defaultInstance.initFields();
     }
 
-    // @@protoc_insertion_point(class_scope:AggregateArgument)
+    // @@protoc_insertion_point(class_scope:AggregateRequest)
   }
 
   public interface AggregateResponseOrBuilder
@@ -1655,59 +1655,59 @@
 
     public interface Interface {
       /**
-       * rpc GetMax(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetMax(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getMax(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetMin(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetMin(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getMin(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetSum(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetSum(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getSum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetRowNum(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetRowNum(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getRowNum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetAvg(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetAvg(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getAvg(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetStd(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetStd(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getStd(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
       /**
-       * rpc GetMedian(.AggregateArgument) returns (.AggregateResponse);
+       * rpc GetMedian(.AggregateRequest) returns (.AggregateResponse);
        */
       public abstract void getMedian(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     }
@@ -1718,7 +1718,7 @@
         @java.lang.Override
         public  void getMax(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getMax(controller, request, done);
         }
@@ -1726,7 +1726,7 @@
         @java.lang.Override
         public  void getMin(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getMin(controller, request, done);
         }
@@ -1734,7 +1734,7 @@
         @java.lang.Override
         public  void getSum(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getSum(controller, request, done);
         }
@@ -1742,7 +1742,7 @@
         @java.lang.Override
         public  void getRowNum(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getRowNum(controller, request, done);
         }
@@ -1750,7 +1750,7 @@
         @java.lang.Override
         public  void getAvg(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getAvg(controller, request, done);
         }
@@ -1758,7 +1758,7 @@
         @java.lang.Override
         public  void getStd(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getStd(controller, request, done);
         }
@@ -1766,7 +1766,7 @@
         @java.lang.Override
         public  void getMedian(
             com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+            org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
           impl.getMedian(controller, request, done);
         }
@@ -1794,19 +1794,19 @@
           }
           switch(method.getIndex()) {
             case 0:
-              return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 1:
-              return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 2:
-              return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 3:
-              return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 4:
-              return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 5:
-              return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             case 6:
-              return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request);
+              return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request);
             default:
               throw new java.lang.AssertionError("Can't get here.");
           }
@@ -1822,19 +1822,19 @@
           }
           switch(method.getIndex()) {
             case 0:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 1:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 2:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 3:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 4:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 5:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             case 6:
-              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+              return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
             default:
               throw new java.lang.AssertionError("Can't get here.");
           }
@@ -1872,59 +1872,59 @@
     }
 
     /**
-     * rpc GetMax(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetMax(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getMax(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetMin(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetMin(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getMin(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetSum(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetSum(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getSum(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetRowNum(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetRowNum(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getRowNum(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetAvg(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetAvg(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getAvg(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetStd(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetStd(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getStd(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     /**
-     * rpc GetMedian(.AggregateArgument) returns (.AggregateResponse);
+     * rpc GetMedian(.AggregateRequest) returns (.AggregateResponse);
      */
     public abstract void getMedian(
         com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
 
     public static final
@@ -1950,37 +1950,37 @@
       }
       switch(method.getIndex()) {
         case 0:
-          this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 1:
-          this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 2:
-          this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 3:
-          this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 4:
-          this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 5:
-          this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
         case 6:
-          this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument)request,
+          this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse>specializeCallback(
               done));
           return;
@@ -1999,19 +1999,19 @@
       }
       switch(method.getIndex()) {
         case 0:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 1:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 2:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 3:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 4:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 5:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         case 6:
-          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument.getDefaultInstance();
+          return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
         default:
           throw new java.lang.AssertionError("Can't get here.");
       }
@@ -2063,7 +2063,7 @@
 
       public  void getMax(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(0),
@@ -2078,7 +2078,7 @@
 
       public  void getMin(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(1),
@@ -2093,7 +2093,7 @@
 
       public  void getSum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(2),
@@ -2108,7 +2108,7 @@
 
       public  void getRowNum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(3),
@@ -2123,7 +2123,7 @@
 
       public  void getAvg(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(4),
@@ -2138,7 +2138,7 @@
 
       public  void getStd(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(5),
@@ -2153,7 +2153,7 @@
 
       public  void getMedian(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request,
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done) {
         channel.callMethod(
           getDescriptor().getMethods().get(6),
@@ -2175,37 +2175,37 @@
     public interface BlockingInterface {
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException;
     }
 
@@ -2218,7 +2218,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(0),
@@ -2230,7 +2230,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(1),
@@ -2242,7 +2242,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(2),
@@ -2254,7 +2254,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(3),
@@ -2266,7 +2266,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(4),
@@ -2278,7 +2278,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(5),
@@ -2290,7 +2290,7 @@
 
       public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian(
           com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateArgument request)
+          org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request)
           throws com.google.protobuf.ServiceException {
         return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod(
           getDescriptor().getMethods().get(6),
@@ -2305,10 +2305,10 @@
   }
 
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_AggregateArgument_descriptor;
+    internal_static_AggregateRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_AggregateArgument_fieldAccessorTable;
+      internal_static_AggregateRequest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_AggregateResponse_descriptor;
   private static
@@ -2323,34 +2323,34 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\017Aggregate.proto\032\014Client.proto\"l\n\021Aggre" +
-      "gateArgument\022\036\n\026interpreter_class_name\030\001" +
-      " \002(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022\"\n\032interpreter" +
-      "_specific_bytes\030\003 \001(\014\"<\n\021AggregateRespon" +
-      "se\022\022\n\nfirst_part\030\001 \003(\014\022\023\n\013second_part\030\002 " +
-      "\001(\0142\366\002\n\020AggregateService\0220\n\006GetMax\022\022.Agg" +
-      "regateArgument\032\022.AggregateResponse\0220\n\006Ge" +
-      "tMin\022\022.AggregateArgument\032\022.AggregateResp" +
-      "onse\0220\n\006GetSum\022\022.AggregateArgument\032\022.Agg" +
-      "regateResponse\0223\n\tGetRowNum\022\022.AggregateA",
-      "rgument\032\022.AggregateResponse\0220\n\006GetAvg\022\022." +
-      "AggregateArgument\032\022.AggregateResponse\0220\n" +
-      "\006GetStd\022\022.AggregateArgument\032\022.AggregateR" +
-      "esponse\0223\n\tGetMedian\022\022.AggregateArgument" +
-      "\032\022.AggregateResponseBE\n*org.apache.hadoo" +
-      "p.hbase.protobuf.generatedB\017AggregatePro" +
-      "tosH\001\210\001\001\240\001\001"
+      "\n\017Aggregate.proto\032\014Client.proto\"k\n\020Aggre" +
+      "gateRequest\022\036\n\026interpreter_class_name\030\001 " +
+      "\002(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022\"\n\032interpreter_" +
+      "specific_bytes\030\003 \001(\014\"<\n\021AggregateRespons" +
+      "e\022\022\n\nfirst_part\030\001 \003(\014\022\023\n\013second_part\030\002 \001" +
+      "(\0142\357\002\n\020AggregateService\022/\n\006GetMax\022\021.Aggr" +
+      "egateRequest\032\022.AggregateResponse\022/\n\006GetM" +
+      "in\022\021.AggregateRequest\032\022.AggregateRespons" +
+      "e\022/\n\006GetSum\022\021.AggregateRequest\032\022.Aggrega" +
+      "teResponse\0222\n\tGetRowNum\022\021.AggregateReque",
+      "st\032\022.AggregateResponse\022/\n\006GetAvg\022\021.Aggre" +
+      "gateRequest\032\022.AggregateResponse\022/\n\006GetSt" +
+      "d\022\021.AggregateRequest\032\022.AggregateResponse" +
+      "\0222\n\tGetMedian\022\021.AggregateRequest\032\022.Aggre" +
+      "gateResponseBE\n*org.apache.hadoop.hbase." +
+      "protobuf.generatedB\017AggregateProtosH\001\210\001\001" +
+      "\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
         public com.google.protobuf.ExtensionRegistry assignDescriptors(
             com.google.protobuf.Descriptors.FileDescriptor root) {
           descriptor = root;
-          internal_static_AggregateArgument_descriptor =
+          internal_static_AggregateRequest_descriptor =
             getDescriptor().getMessageTypes().get(0);
-          internal_static_AggregateArgument_fieldAccessorTable = new
+          internal_static_AggregateRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_AggregateArgument_descriptor,
+              internal_static_AggregateRequest_descriptor,
               new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", });
           internal_static_AggregateResponse_descriptor =
             getDescriptor().getMessageTypes().get(1);
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java	(working copy)
@@ -2263,7 +2263,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\013HFile.proto\032\013hbase.proto\"3\n\rFileInfoPr" +
+      "\n\013HFile.proto\032\013HBase.proto\"3\n\rFileInfoPr" +
       "oto\022\"\n\tmap_entry\030\001 \003(\0132\017.BytesBytesPair\"" +
       "\371\002\n\020FileTrailerProto\022\030\n\020file_info_offset" +
       "\030\001 \001(\004\022 \n\030load_on_open_data_offset\030\002 \001(\004" +
Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
===================================================================
--- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java	(revision 1522009)
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java	(working copy)
@@ -20905,7 +20905,7 @@
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\013Admin.proto\032\014Client.proto\032\013hbase.proto" +
+      "\n\013Admin.proto\032\014Client.proto\032\013HBase.proto" +
       "\032\tWAL.proto\"R\n\024GetRegionInfoRequest\022 \n\006r" +
       "egion\030\001 \002(\0132\020.RegionSpecifier\022\030\n\020compact" +
       "ion_state\030\002 \001(\010\"\331\001\n\025GetRegionInfoRespons" +
Index: hbase-protocol/pom.xml
===================================================================
--- hbase-protocol/pom.xml	(revision 1522009)
+++ hbase-protocol/pom.xml	(working copy)
@@ -102,7 +102,7 @@
                     ErrorHandling.proto
                     Filter.proto
                     FS.proto
-                    hbase.proto
+                    HBase.proto
                     HFile.proto
                     LoadBalancer.proto
                     MapReduce.proto
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java	(revision 1522009)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java	(working copy)
@@ -48,8 +48,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteSnapshotRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
@@ -216,8 +216,8 @@
   @Test(timeout = 300000)
   public void testGetCompletedSnapshots() throws Exception {
     // first check when there are no snapshots
-    ListSnapshotRequest request = ListSnapshotRequest.newBuilder().build();
-    ListSnapshotResponse response = master.getCompletedSnapshots(null, request);
+    GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build();
+    GetCompletedSnapshotsResponse response = master.getCompletedSnapshots(null, request);
     assertEquals("Found unexpected number of snapshots", 0, response.getSnapshotsCount());
 
     // write one snapshot to the fs
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java	(revision 1522009)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java	(working copy)
@@ -55,8 +55,8 @@
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse;
-import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
 import org.apache.hadoop.hbase.regionserver.BaseRowProcessor;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -76,7 +76,7 @@
 import com.sun.org.apache.commons.logging.LogFactory;
 
 /**
- * Verifies ProcessRowEndpoint works.
+ * Verifies ProcessEndpoint works.
  * The tested RowProcessor performs two scans and a read-modify-write.
  */
 @Category(MediumTests.class)
@@ -151,18 +151,18 @@
   @Test
   public void testDoubleScan() throws Throwable {
     prepareTestData();
-    
+
     CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.FriendsOfFriendsProcessor processor =
         new RowProcessorEndpoint.FriendsOfFriendsProcessor(ROW, A);
-    RowProcessorService.BlockingInterface service = 
+    RowProcessorService.BlockingInterface service =
         RowProcessorService.newBlockingStub(channel);
-    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
-    RowProcessorResult protoResult = service.process(null, request);
-    FriendsOfFriendsProcessorResponse response = 
+    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    ProcessResponse protoResult = service.process(null, request);
+    FriendsOfFriendsProcessorResponse response =
         FriendsOfFriendsProcessorResponse.parseFrom(protoResult.getRowProcessorResult());
     Set<String> result = new HashSet<String>();
-    result.addAll(response.getResultList()); 
+    result.addAll(response.getResultList());
     Set<String> expected =
       new HashSet<String>(Arrays.asList(new String[]{"d", "e", "f", "g"}));
     Get get = new Get(ROW);
@@ -198,10 +198,10 @@
     CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.IncrementCounterProcessor processor =
         new RowProcessorEndpoint.IncrementCounterProcessor(ROW);
-    RowProcessorService.BlockingInterface service = 
+    RowProcessorService.BlockingInterface service =
         RowProcessorService.newBlockingStub(channel);
-    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
-    RowProcessorResult protoResult = service.process(null, request);
+    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    ProcessResponse protoResult = service.process(null, request);
     IncCounterProcessorResponse response = IncCounterProcessorResponse
         .parseFrom(protoResult.getRowProcessorResult());
     Integer result = response.getResponse();
@@ -261,9 +261,9 @@
     CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.RowSwapProcessor processor =
         new RowProcessorEndpoint.RowSwapProcessor(ROW, ROW2);
-    RowProcessorService.BlockingInterface service = 
+    RowProcessorService.BlockingInterface service =
         RowProcessorService.newBlockingStub(channel);
-    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
     service.process(null, request);
   }
 
@@ -273,9 +273,9 @@
     CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.TimeoutProcessor processor =
         new RowProcessorEndpoint.TimeoutProcessor(ROW);
-    RowProcessorService.BlockingInterface service = 
+    RowProcessorService.BlockingInterface service =
         RowProcessorService.newBlockingStub(channel);
-    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    ProcessRequest request = RowProcessorClient.getRowProcessorPB(processor);
     boolean exceptionCaught = false;
     try {
       service.process(null, request);
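For comparison, a minimal sketch of building the renamed request directly (a hypothetical helper, not part of the patch; it sets only the row_processor_class_name field that RowProcessorClient.getRowProcessorPB also populates, and omits the optional request data that the client serializes when present):

    import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;

    public final class ProcessRequestSketch {
      // Builds the renamed request for a given processor class.
      public static ProcessRequest forProcessor(Class<?> processorClass) {
        return ProcessRequest.newBuilder()
            .setRowProcessorClassName(processorClass.getName())
            .build();
      }
    }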
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java	(revision 1522009)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java	(working copy)
@@ -255,14 +255,14 @@
     }
 
     @Override
-    public AuthenticationProtos.TokenResponse getAuthenticationToken(
-        RpcController controller, AuthenticationProtos.TokenRequest request)
+    public AuthenticationProtos.GetAuthenticationTokenResponse getAuthenticationToken(
+        RpcController controller, AuthenticationProtos.GetAuthenticationTokenRequest request)
       throws ServiceException {
       LOG.debug("Authentication token request from "+RequestContext.getRequestUserName());
       // ignore passed in controller -- it's always null
       ServerRpcController serverController = new ServerRpcController();
-      BlockingRpcCallback<AuthenticationProtos.TokenResponse> callback =
-          new BlockingRpcCallback<AuthenticationProtos.TokenResponse>();
+      BlockingRpcCallback<AuthenticationProtos.GetAuthenticationTokenResponse> callback =
+          new BlockingRpcCallback<AuthenticationProtos.GetAuthenticationTokenResponse>();
       getAuthenticationToken(serverController, request, callback);
       try {
         serverController.checkFailed();
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java	(revision 1522009)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java	(working copy)
@@ -85,8 +85,8 @@
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
@@ -4314,10 +4314,10 @@
     p.add(FAMILY, QUALIFIER, VALUE);
     MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, p);
 
-    MultiMutateRequest.Builder mrmBuilder = MultiMutateRequest.newBuilder();
+    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
     mrmBuilder.addMutationRequest(m1);
     mrmBuilder.addMutationRequest(m2);
-    MultiMutateRequest mrm = mrmBuilder.build();
+    MutateRowsRequest mrm = mrmBuilder.build();
     CoprocessorRpcChannel channel = t.coprocessorService(ROW);
     MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java	(working copy)
@@ -123,8 +123,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest;
@@ -147,8 +147,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest;
@@ -165,8 +165,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos;
@@ -1453,10 +1453,10 @@
   }
 
   @Override
-  public CatalogScanResponse runCatalogScan(RpcController c,
-      CatalogScanRequest req) throws ServiceException {
+  public RunCatalogScanResponse runCatalogScan(RpcController c,
+      RunCatalogScanRequest req) throws ServiceException {
     try {
-      return ResponseConverter.buildCatalogScanResponse(catalogJanitorChore.scan());
+      return ResponseConverter.buildRunCatalogScanResponse(catalogJanitorChore.scan());
     } catch (IOException ioe) {
       throw new ServiceException(ioe);
     }
@@ -2869,7 +2869,7 @@
    * {@inheritDoc}
    */
   @Override
-  public TakeSnapshotResponse snapshot(RpcController controller, TakeSnapshotRequest request)
+  public SnapshotResponse snapshot(RpcController controller, SnapshotRequest request)
       throws ServiceException {
     try {
       this.snapshotManager.checkSnapshotSupport();
@@ -2891,17 +2891,17 @@
     // send back the max amount of time the client should wait for the snapshot to complete
     long waitTime = SnapshotDescriptionUtils.getMaxMasterTimeout(conf, snapshot.getType(),
       SnapshotDescriptionUtils.DEFAULT_MAX_WAIT_TIME);
-    return TakeSnapshotResponse.newBuilder().setExpectedTimeout(waitTime).build();
+    return SnapshotResponse.newBuilder().setExpectedTimeout(waitTime).build();
   }
 
   /**
    * List the currently available/stored snapshots. Any in-progress snapshots are ignored
    */
   @Override
-  public ListSnapshotResponse getCompletedSnapshots(RpcController controller,
-      ListSnapshotRequest request) throws ServiceException {
+  public GetCompletedSnapshotsResponse getCompletedSnapshots(RpcController controller,
+      GetCompletedSnapshotsRequest request) throws ServiceException {
     try {
-      ListSnapshotResponse.Builder builder = ListSnapshotResponse.newBuilder();
+      GetCompletedSnapshotsResponse.Builder builder = GetCompletedSnapshotsResponse.newBuilder();
      List<SnapshotDescription> snapshots = snapshotManager.getCompletedSnapshots();
 
       // convert to protobuf
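Seen from the caller's side, the renamed snapshot RPC pair works as in the hedged sketch below. Here "master" stands in for a MasterAdminService blocking stub, the snapshot and table names are hypothetical, and setSnapshot(...) assumes the request wraps the SnapshotDescription that the handler above reads:

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos;

    public final class SnapshotRpcSketch {
      public static long takeAndListSnapshots(
          MasterAdminProtos.MasterAdminService.BlockingInterface master) throws Exception {
        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
            .setName("demo_snapshot")  // hypothetical snapshot name
            .setTable("demo_table")    // hypothetical table name
            .build();
        MasterAdminProtos.SnapshotResponse taken = master.snapshot(null,
            MasterAdminProtos.SnapshotRequest.newBuilder().setSnapshot(snapshot).build());
        // The renamed listing call, as exercised by TestSnapshotFromMaster above.
        MasterAdminProtos.GetCompletedSnapshotsResponse completed =
            master.getCompletedSnapshots(null,
                MasterAdminProtos.GetCompletedSnapshotsRequest.newBuilder().build());
        System.out.println("completed snapshots: " + completed.getSnapshotsCount());
        return taken.getExpectedTimeout();  // max time the client should wait
      }
    }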
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java	(working copy)
@@ -26,8 +26,8 @@
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.protobuf.ResponseConverter;
-import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RowProcessor;
@@ -61,16 +61,16 @@
    * the read-modify-write procedure.
    */
   @Override
-  public void process(RpcController controller, RowProcessorRequest request,
-      RpcCallback<RowProcessorResult> done) {
-    RowProcessorResult resultProto = null;
+  public void process(RpcController controller, ProcessRequest request,
+      RpcCallback<ProcessResponse> done) {
+    ProcessResponse resultProto = null;
     try {
      RowProcessor<S,T> processor = constructRowProcessorFromRequest(request);
       HRegion region = env.getRegion();
       region.processRowsWithLocks(processor);
       T result = processor.getResult();
-      RowProcessorResult.Builder b = RowProcessorResult.newBuilder();
-      b.setRowProcessorResult(result.toByteString()); 
+      ProcessResponse.Builder b = ProcessResponse.newBuilder();
+      b.setRowProcessorResult(result.toByteString());
       resultProto = b.build();
     } catch (Exception e) {
       ResponseConverter.setControllerException(controller, new IOException(e));
@@ -108,7 +108,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  RowProcessor<S,T> constructRowProcessorFromRequest(RowProcessorRequest request)
+  RowProcessor<S,T> constructRowProcessorFromRequest(ProcessRequest request)
       throws IOException {
     String className = request.getRowProcessorClassName();
    Class<?> cls;
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java	(working copy)
@@ -35,9 +35,9 @@
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
 
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
@@ -63,13 +63,13 @@
  * ...
  * Mutate m1 = ProtobufUtil.toMutate(MutateType.PUT, p1);
  * Mutate m2 = ProtobufUtil.toMutate(MutateType.PUT, p2);
- * MultiMutateRequest.Builder mrmBuilder = MultiMutateRequest.newBuilder();
+ * MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
  * mrmBuilder.addMutationRequest(m1);
  * mrmBuilder.addMutationRequest(m2);
  * CoprocessorRpcChannel channel = t.coprocessorService(ROW);
  * MultiRowMutationService.BlockingInterface service = 
  *    MultiRowMutationService.newBlockingStub(channel);
- * MultiMutateRequest mrm = mrmBuilder.build();
+ * MutateRowsRequest mrm = mrmBuilder.build();
  * service.mutateRows(null, mrm);
  * </pre>
  */
@@ -79,9 +79,9 @@
 CoprocessorService, Coprocessor {
   private RegionCoprocessorEnvironment env;
   @Override
-  public void mutateRows(RpcController controller, MultiMutateRequest request,
-      RpcCallback<MultiMutateResponse> done) {
-    MultiMutateResponse response = MultiMutateResponse.getDefaultInstance();
+  public void mutateRows(RpcController controller, MutateRowsRequest request,
+      RpcCallback<MutateRowsResponse> done) {
+    MutateRowsResponse response = MutateRowsResponse.getDefaultInstance();
     try {
       // set of rows to lock, sorted to avoid deadlocks
       SortedSet<byte[]> rowsToLock = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java	(working copy)
@@ -98,10 +98,10 @@
 
   @Override
   public void getAuthenticationToken(RpcController controller,
-      AuthenticationProtos.TokenRequest request,
-      RpcCallback<AuthenticationProtos.TokenResponse> done) {
-    AuthenticationProtos.TokenResponse.Builder response =
-        AuthenticationProtos.TokenResponse.newBuilder();
+      AuthenticationProtos.GetAuthenticationTokenRequest request,
+      RpcCallback<AuthenticationProtos.GetAuthenticationTokenResponse> done) {
+    AuthenticationProtos.GetAuthenticationTokenResponse.Builder response =
+        AuthenticationProtos.GetAuthenticationTokenResponse.newBuilder();
     try {
       if (secretManager == null) {
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java	(working copy)
@@ -57,8 +57,8 @@
       CoprocessorRpcChannel rpcChannel = meta.coprocessorService(HConstants.EMPTY_START_ROW);
       AuthenticationProtos.AuthenticationService.BlockingInterface service =
           AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
-      AuthenticationProtos.TokenResponse response = service.getAuthenticationToken(null,
-          AuthenticationProtos.TokenRequest.getDefaultInstance());
+      AuthenticationProtos.GetAuthenticationTokenResponse response = service.getAuthenticationToken(null,
+          AuthenticationProtos.GetAuthenticationTokenRequest.getDefaultInstance());
       return ProtobufUtil.toToken(response.getToken());
     } catch (ServiceException se) {
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java	(working copy)
@@ -1325,9 +1325,9 @@
 
   @Override
   public void getUserPermissions(RpcController controller,
-      AccessControlProtos.UserPermissionsRequest request,
-      RpcCallback<AccessControlProtos.UserPermissionsResponse> done) {
-    AccessControlProtos.UserPermissionsResponse response = null;
+      AccessControlProtos.GetUserPermissionsRequest request,
+      RpcCallback<AccessControlProtos.GetUserPermissionsResponse> done) {
+    AccessControlProtos.GetUserPermissionsResponse response = null;
     try {
       // only allowed to be called on _acl_ region
       if (aclRegion) {
@@ -1348,7 +1348,7 @@
         perms = AccessControlLists.getUserPermissions(
           regionEnv.getConfiguration(), null);
       }
-      response = ResponseConverter.buildUserPermissionsResponse(perms);
+      response = ResponseConverter.buildGetUserPermissionsResponse(perms);
     } else {
       throw new CoprocessorException(AccessController.class, "This method "
           + "can only execute at " + AccessControlLists.ACL_TABLE_NAME + " table.");
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java	(working copy)
@@ -39,8 +39,8 @@
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiRowMutationService;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import com.google.protobuf.ServiceException;
@@ -378,7 +378,7 @@
    */
   private static void multiMutate(HTable table, byte[] row, Mutation... mutations) throws IOException {
     CoprocessorRpcChannel channel = table.coprocessorService(row);
-    MultiMutateRequest.Builder mmrBuilder = MultiMutateRequest.newBuilder();
+    MutateRowsRequest.Builder mmrBuilder = MutateRowsRequest.newBuilder();
     for (Mutation mutation : mutations) {
       if (mutation instanceof Put) {
         mmrBuilder.addMutationRequest(ProtobufUtil.toMutation(MutationType.PUT, mutation));
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java	(revision 1522009)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java	(working copy)
@@ -22,7 +22,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
 import org.apache.hadoop.hbase.regionserver.RowProcessor;
 
 import com.google.protobuf.Message;
@@ -35,10 +35,10 @@
 @InterfaceStability.Evolving
 public class RowProcessorClient {
   public static <S extends Message, T extends Message>
-  RowProcessorRequest getRowProcessorPB(RowProcessor<S,T> r)
+  ProcessRequest getRowProcessorPB(RowProcessor<S,T> r)
       throws IOException {
-    final RowProcessorRequest.Builder requestBuilder =
-        RowProcessorRequest.newBuilder();
+    final ProcessRequest.Builder requestBuilder =
+        ProcessRequest.newBuilder();
     requestBuilder.setRowProcessorClassName(r.getClass().getName());
     S s = r.getRequestData();
     if (s != null) {
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java	(working copy)
@@ -78,8 +78,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateNamespaceRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateNamespaceResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest;
@@ -110,8 +110,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListNamespaceDescriptorsRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListNamespaceDescriptorsResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListTableDescriptorsByNamespaceRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListTableDescriptorsByNamespaceResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListTableNamesByNamespaceRequest;
@@ -135,8 +135,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest;
@@ -171,7 +171,7 @@
  * A non-instantiable class that manages creation of {@link HConnection}s.
  * <p>The simplest way to use this class is by using {@link #createConnection(Configuration)}.
  * This creates a new {@link HConnection} that is managed by the caller.
- * From this {@link HConnection} {@link HTableInterface} implementations are retrieved 
+ * From this {@link HConnection} {@link HTableInterface} implementations are retrieved
  * with {@link HConnection#getTable(byte[])}. Example:
 * <pre>
  * {@code
@@ -646,7 +646,7 @@
      HConnectionImplementation(Configuration conf, boolean managed) throws IOException {
        this(conf, managed, null, null);
      }
-     
+
     /**
      * constructor
      * @param conf Configuration object
@@ -713,7 +713,7 @@
           HConstants.HBASE_CLIENT_PREFETCH_LIMIT,
           HConstants.DEFAULT_HBASE_CLIENT_PREFETCH_LIMIT);
     }
- 
+
     @Override
     public HTableInterface getTable(String tableName) throws IOException {
       return getTable(TableName.valueOf(tableName));
@@ -1188,7 +1188,7 @@
           // the second will use the value that the first one found.
           synchronized (regionLockObject) {
             // Check the cache again for a hit in case some other thread made the
-            // same query while we were waiting on the lock. 
+            // same query while we were waiting on the lock.
             if (useCache) {
               location = getCachedLocation(tableName, row);
               if (location != null) {
@@ -2079,8 +2079,8 @@
         }
 
         @Override
-        public CatalogScanResponse runCatalogScan(RpcController controller,
-            CatalogScanRequest request) throws ServiceException {
+        public RunCatalogScanResponse runCatalogScan(RpcController controller,
+            RunCatalogScanRequest request) throws ServiceException {
           return stub.runCatalogScan(controller, request);
         }
 
@@ -2106,14 +2106,14 @@
         }
 
         @Override
-        public TakeSnapshotResponse snapshot(RpcController controller,
-            TakeSnapshotRequest request) throws ServiceException {
+        public SnapshotResponse snapshot(RpcController controller,
+            SnapshotRequest request) throws ServiceException {
           return stub.snapshot(controller, request);
         }
 
         @Override
-        public ListSnapshotResponse getCompletedSnapshots(
-            RpcController controller, ListSnapshotRequest request)
+        public GetCompletedSnapshotsResponse getCompletedSnapshots(
+            RpcController controller, GetCompletedSnapshotsRequest request)
             throws ServiceException {
           return stub.getCompletedSnapshots(controller, request);
         }
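
The delegating methods above only forward to the renamed generated stubs, so callers interact purely with the new request/response types. A minimal sketch of driving the renamed catalog-scan RPC through such a stub (hypothetical example class, not part of this patch; it assumes a MasterAdminService.BlockingInterface obtained from the connection):

import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MasterAdminService;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest;

import com.google.protobuf.ServiceException;

// Hypothetical example class, not part of this patch.
public class RunCatalogScanExample {
  // Triggers one catalog-janitor scan via the renamed RPC and returns
  // the number of garbage regions the janitor cleaned.
  public static int runCatalogScan(MasterAdminService.BlockingInterface masterAdmin)
      throws ServiceException {
    RunCatalogScanRequest request = RunCatalogScanRequest.newBuilder().build();
    return masterAdmin.runCatalogScan(null, request).getScanResult();
  }
}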
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java	(working copy)
@@ -103,7 +103,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsRestoreSnapshotDoneResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ListSnapshotRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.GetCompletedSnapshotsRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ModifyTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.MoveRegionRequest;
@@ -112,8 +112,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SetBalancerRunningRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.ShutdownRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.StopMasterRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetClusterStatusRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetSchemaAlterStatusRequest;
@@ -2655,7 +2655,7 @@
   public void snapshot(SnapshotDescription snapshot) throws IOException, SnapshotCreationException,
       IllegalArgumentException {
     // actually take the snapshot
-    TakeSnapshotResponse response = takeSnapshotAsync(snapshot);
+    SnapshotResponse response = takeSnapshotAsync(snapshot);
     final IsSnapshotDoneRequest request = IsSnapshotDoneRequest.newBuilder().setSnapshot(snapshot)
         .build();
     IsSnapshotDoneResponse done = null;
@@ -2704,15 +2704,15 @@
    * @throws SnapshotCreationException if snapshot creation failed
    * @throws IllegalArgumentException if the snapshot request is formatted incorrectly
    */
-  public TakeSnapshotResponse takeSnapshotAsync(SnapshotDescription snapshot) throws IOException,
+  public SnapshotResponse takeSnapshotAsync(SnapshotDescription snapshot) throws IOException,
       SnapshotCreationException {
     ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(snapshot);
-    final TakeSnapshotRequest request = TakeSnapshotRequest.newBuilder().setSnapshot(snapshot)
+    final SnapshotRequest request = SnapshotRequest.newBuilder().setSnapshot(snapshot)
         .build();
     // run the snapshot on the master
-    return executeCallable(new MasterAdminCallable<TakeSnapshotResponse>(getConnection()) {
+    return executeCallable(new MasterAdminCallable<SnapshotResponse>(getConnection()) {
       @Override
-      public TakeSnapshotResponse call() throws ServiceException {
+      public SnapshotResponse call() throws ServiceException {
         return masterAdmin.snapshot(null, request);
       }
     });
@@ -2964,7 +2964,7 @@
     return executeCallable(new MasterAdminCallable<List<SnapshotDescription>>(getConnection()) {
       @Override
       public List<SnapshotDescription> call() throws ServiceException {
-        return masterAdmin.getCompletedSnapshots(null, ListSnapshotRequest.newBuilder().build())
+        return masterAdmin.getCompletedSnapshots(null, GetCompletedSnapshotsRequest.newBuilder().build())
             .getSnapshotsList();
       }
     });
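
With the rename, a caller takes a snapshot exactly as before; only the wire types changed. A minimal sketch (hypothetical example class, not part of this patch; HBaseAdmin.snapshot(SnapshotDescription) is the patched method above):

import java.io.IOException;

import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;

// Hypothetical example class, not part of this patch.
public class SnapshotExample {
  // Builds a SnapshotDescription and takes the snapshot synchronously.
  // Internally HBaseAdmin now sends a SnapshotRequest (formerly
  // TakeSnapshotRequest) and polls with IsSnapshotDoneRequest.
  public static void takeSnapshot(HBaseAdmin admin, String name, String table)
      throws IOException {
    SnapshotDescription snapshot = SnapshotDescription.newBuilder()
        .setName(name)
        .setTable(table)
        .setType(SnapshotDescription.Type.FLUSH) // the proto default
        .build();
    admin.snapshot(snapshot);
  }
}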
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java	(working copy)
@@ -127,11 +127,11 @@
 
             @Override
             public Boolean call(SecureBulkLoadProtos.SecureBulkLoadService instance) throws IOException {
-              SecureBulkLoadProtos.DelegationTokenProto protoDT =
-                  SecureBulkLoadProtos.DelegationTokenProto.newBuilder().build();
+              SecureBulkLoadProtos.DelegationToken protoDT =
+                  SecureBulkLoadProtos.DelegationToken.newBuilder().build();
               if(userToken != null) {
                 protoDT =
-                  SecureBulkLoadProtos.DelegationTokenProto.newBuilder()
+                  SecureBulkLoadProtos.DelegationToken.newBuilder()
                      .setIdentifier(ByteString.copyFrom(userToken.getIdentifier()))
                      .setPassword(ByteString.copyFrom(userToken.getPassword()))
                      .setKind(userToken.getKind().toString())
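
The message loses its redundant "Proto" suffix; construction is otherwise unchanged. A minimal sketch of the conversion the patched block performs (hypothetical helper, not part of this patch; the service field is assumed from SecureBulkLoad.proto):

import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;
import org.apache.hadoop.security.token.Token;

import com.google.protobuf.ByteString;

// Hypothetical example class, not part of this patch.
public class DelegationTokenExample {
  // Converts a Hadoop delegation token into the renamed
  // SecureBulkLoadProtos.DelegationToken message; an empty message
  // stands in when no token is available.
  public static SecureBulkLoadProtos.DelegationToken toProto(Token<?> userToken) {
    if (userToken == null) {
      return SecureBulkLoadProtos.DelegationToken.newBuilder().build();
    }
    return SecureBulkLoadProtos.DelegationToken.newBuilder()
        .setIdentifier(ByteString.copyFrom(userToken.getIdentifier()))
        .setPassword(ByteString.copyFrom(userToken.getPassword()))
        .setKind(userToken.getKind().toString())
        .setService(userToken.getService().toString()) // assumed field, per SecureBulkLoad.proto
        .build();
  }
}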
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java	(working copy)
@@ -31,7 +31,7 @@
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
-import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UserPermissionsResponse;
+import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoResponse;
@@ -42,7 +42,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.EnableCatalogJanitorResponse;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;
 import org.apache.hadoop.hbase.regionserver.RegionOpeningState;
@@ -109,11 +109,11 @@
   }
 
   /**
-   * Converts the permissions list into a protocol buffer UserPermissionsResponse
+   * Converts the permissions list into a protocol buffer GetUserPermissionsResponse
    */
-  public static UserPermissionsResponse buildUserPermissionsResponse(
+  public static GetUserPermissionsResponse buildGetUserPermissionsResponse(
       final List<UserPermission> permissions) {
-    UserPermissionsResponse.Builder builder = UserPermissionsResponse.newBuilder();
+    GetUserPermissionsResponse.Builder builder = GetUserPermissionsResponse.newBuilder();
     for (UserPermission perm : permissions) {
       builder.addUserPermission(ProtobufUtil.toUserPermission(perm));
     }
@@ -227,10 +227,10 @@
 
   /**
    * Creates a response for the catalog scan request
-   * @return A CatalogScanResponse
+   * @return A RunCatalogScanResponse
    */
-  public static CatalogScanResponse buildCatalogScanResponse(int numCleaned) {
-    return CatalogScanResponse.newBuilder().setScanResult(numCleaned).build();
+  public static RunCatalogScanResponse buildRunCatalogScanResponse(int numCleaned) {
+    return RunCatalogScanResponse.newBuilder().setScanResult(numCleaned).build();
   }
 
   /**
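
Request and response names now pair up (GetUserPermissionsRequest with GetUserPermissionsResponse), and the builder method follows suit. A minimal sketch of the server-side usage (hypothetical example class, not part of this patch):

import java.util.List;

import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
import org.apache.hadoop.hbase.security.access.UserPermission;

// Hypothetical example class, not part of this patch.
public class BuildPermissionsResponseExample {
  // Wraps a permission list into the renamed response message; each
  // UserPermission becomes one entry of the repeated user_permission field.
  public static GetUserPermissionsResponse build(List<UserPermission> perms) {
    return ResponseConverter.buildGetUserPermissionsResponse(perms);
  }
}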
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java	(working copy)
@@ -1939,11 +1939,11 @@
    */
   public static List<UserPermission> getUserPermissions(
       AccessControlService.BlockingInterface protocol) throws ServiceException {
-    AccessControlProtos.UserPermissionsRequest.Builder builder =
-      AccessControlProtos.UserPermissionsRequest.newBuilder();
+    AccessControlProtos.GetUserPermissionsRequest.Builder builder =
+      AccessControlProtos.GetUserPermissionsRequest.newBuilder();
     builder.setType(AccessControlProtos.Permission.Type.Global);
-    AccessControlProtos.UserPermissionsRequest request = builder.build();
-    AccessControlProtos.UserPermissionsResponse response =
+    AccessControlProtos.GetUserPermissionsRequest request = builder.build();
+    AccessControlProtos.GetUserPermissionsResponse response =
       protocol.getUserPermissions(null, request);
     List<UserPermission> perms = new ArrayList<UserPermission>();
     for (AccessControlProtos.UserPermission perm: response.getUserPermissionList()) {
@@ -1964,14 +1964,14 @@
   public static List<UserPermission> getUserPermissions(
       AccessControlService.BlockingInterface protocol,
       TableName t) throws ServiceException {
-    AccessControlProtos.UserPermissionsRequest.Builder builder =
-      AccessControlProtos.UserPermissionsRequest.newBuilder();
+    AccessControlProtos.GetUserPermissionsRequest.Builder builder =
+      AccessControlProtos.GetUserPermissionsRequest.newBuilder();
     if (t != null) {
       builder.setTableName(ProtobufUtil.toProtoTableName(t));
     }
     builder.setType(AccessControlProtos.Permission.Type.Table);
-    AccessControlProtos.UserPermissionsRequest request = builder.build();
-    AccessControlProtos.UserPermissionsResponse response =
+    AccessControlProtos.GetUserPermissionsRequest request = builder.build();
+    AccessControlProtos.GetUserPermissionsResponse response =
       protocol.getUserPermissions(null, request);
     List<UserPermission> perms = new ArrayList<UserPermission>();
     for (AccessControlProtos.UserPermission perm: response.getUserPermissionList()) {
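
Client-side, the renamed request/response types stay hidden behind ProtobufUtil. A minimal sketch of fetching table-level permissions through the patched helper (hypothetical example class, not part of this patch; the stub is assumed to be bound to the _acl_ table's region):

import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.security.access.UserPermission;

import com.google.protobuf.ServiceException;

// Hypothetical example class, not part of this patch.
public class GetUserPermissionsExample {
  // Sends a GetUserPermissionsRequest with Type.Table and the table
  // name set, then unwraps the GetUserPermissionsResponse.
  public static List<UserPermission> tablePermissions(
      AccessControlService.BlockingInterface protocol, TableName table)
      throws ServiceException {
    return ProtobufUtil.getUserPermissions(protocol, table);
  }
}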
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java	(working copy)
@@ -78,7 +78,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AddColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.AssignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.BalanceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CatalogScanRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteColumnRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.DeleteTableRequest;
@@ -1206,14 +1206,14 @@
   /**
    * @see {@link #buildCatalogScanRequest}
    */
-  private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
-    CatalogScanRequest.newBuilder().build();
+  private static final RunCatalogScanRequest CATALOG_SCAN_REQUEST =
+    RunCatalogScanRequest.newBuilder().build();
 
   /**
    * Creates a request for running a catalog scan
-   * @return A {@link CatalogScanRequest}
+   * @return A {@link RunCatalogScanRequest}
    */
-  public static CatalogScanRequest buildCatalogScanRequest() {
+  public static RunCatalogScanRequest buildCatalogScanRequest() {
     return CATALOG_SCAN_REQUEST;
   }
 
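
Because RunCatalogScanRequest carries no fields and generated protobuf messages are immutable, RequestConverter can hand out one shared instance instead of building a new message per call. A minimal sketch of what that buys a caller (hypothetical example class, not part of this patch):

import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.RunCatalogScanRequest;

// Hypothetical example class, not part of this patch.
public class CachedRequestExample {
  public static RunCatalogScanRequest get() {
    RunCatalogScanRequest a = RequestConverter.buildCatalogScanRequest();
    RunCatalogScanRequest b = RequestConverter.buildCatalogScanRequest();
    // Same cached instance every time; safe to share across threads
    // because the message is immutable.
    assert a == b;
    return a;
  }
}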
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java	(revision 1522009)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java	(working copy)
@@ -66,7 +66,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
-import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo;
+import org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo;
 import org.apache.hadoop.hbase.security.AuthMethod;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
 import org.apache.hadoop.hbase.security.SecurityInfo;
Index: hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
===================================================================
--- hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java	(revision 1522009)
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java	(working copy)
@@ -35,8 +35,8 @@
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.SnapshotResponse;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -85,13 +85,13 @@
     Mockito.when(mockConnection.getConfiguration()).thenReturn(conf);
     Mockito.when(mockConnection.getKeepAliveMasterAdminService()).thenReturn(mockMaster);
     // set the max wait time for the snapshot to complete
-    TakeSnapshotResponse response = TakeSnapshotResponse.newBuilder()
+    SnapshotResponse response = SnapshotResponse.newBuilder()
         .setExpectedTimeout(maxWaitTime)
         .build();
     Mockito
         .when(
           mockMaster.snapshot((RpcController) Mockito.isNull(),
-            Mockito.any(TakeSnapshotRequest.class))).thenReturn(response);
+            Mockito.any(SnapshotRequest.class))).thenReturn(response);
     // setup the response
     IsSnapshotDoneResponse.Builder builder = IsSnapshotDoneResponse.newBuilder();
     builder.setDone(false);
@@ -142,9 +142,9 @@
     // mock the master connection
     MasterAdminKeepAliveConnection master = Mockito.mock(MasterAdminKeepAliveConnection.class);
     Mockito.when(mockConnection.getKeepAliveMasterAdminService()).thenReturn(master);
-    TakeSnapshotResponse response = TakeSnapshotResponse.newBuilder().setExpectedTimeout(0).build();
+    SnapshotResponse response = SnapshotResponse.newBuilder().setExpectedTimeout(0).build();
     Mockito.when(
-      master.snapshot((RpcController) Mockito.isNull(), Mockito.any(TakeSnapshotRequest.class)))
+      master.snapshot((RpcController) Mockito.isNull(), Mockito.any(SnapshotRequest.class)))
         .thenReturn(response);
     IsSnapshotDoneResponse doneResponse = IsSnapshotDoneResponse.newBuilder().setDone(true).build();
     Mockito.when(