diff --git dev-support/build-proto.sh dev-support/build-proto.sh
index b4ae479..e69de29 100755
--- dev-support/build-proto.sh
+++ dev-support/build-proto.sh
@@ -1,36 +0,0 @@
-#!/bin/bash
-##
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##
-# script to run protoc to generate protocol buf files.
-# usage: ./build-proto.sh
-#
-
-which protoc
-if [ $? != 0 ] ; then
- echo "Must have protoc compiler in your path to generate code"
- exit 1
-fi
-
-HBASE_DIR=`dirname $0`/..
-PROTO_DIR=$HBASE_DIR/hbase-protocol/src/main/protobuf
-JAVA_DIR=$HBASE_DIR/hbase-protocol/src/main/java
-
-set -x
-for f in $PROTO_DIR/*.proto ; do
- protoc -I$PROTO_DIR --java_out=$JAVA_DIR $f
-done
\ No newline at end of file
diff --git hbase-examples/pom.xml hbase-examples/pom.xml
index 2a16ca4..4f89469 100644
--- hbase-examples/pom.xml
+++ hbase-examples/pom.xml
@@ -245,5 +245,44 @@ if we can combine these profiles somehow -->
-
+    <profile>
+      <id>compile-protobuf</id>
+      <activation>
+        <property>
+          <name>compile-protobuf</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <executions>
+              <execution>
+                <id>compile-protoc</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>protoc</goal>
+                </goals>
+                <configuration>
+                  <imports>
+                    <param>${basedir}/src/main/protobuf</param>
+                    <param>${basedir}/../hbase-protocol/src/main/protobuf</param>
+                  </imports>
+                  <source>
+                    <directory>${basedir}/src/main/protobuf</directory>
+                    <includes>
+                      <include>BulkDelete.proto</include>
+                      <include>Examples.proto</include>
+                    </includes>
+                  </source>
+                  <output>${basedir}/src/main/java/</output>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
diff --git hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
index 2d421e1..911fd16 100644
--- hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
+++ hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
@@ -10,68 +10,218 @@ public final class BulkDeleteProtos {
}
public interface BulkDeleteRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required .Scan scan = 1;
+ /**
+ * required .Scan scan = 1;
+ */
boolean hasScan();
+ /**
+ * required .Scan scan = 1;
+ */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ /**
+ * required .Scan scan = 1;
+ */
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
-
+
// required .BulkDeleteRequest.DeleteType deleteType = 2;
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
boolean hasDeleteType();
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType();
-
+
// optional uint64 timestamp = 3;
+ /**
+ * optional uint64 timestamp = 3;
+ */
boolean hasTimestamp();
+ /**
+ * optional uint64 timestamp = 3;
+ */
long getTimestamp();
-
+
// required uint32 rowBatchSize = 4;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
boolean hasRowBatchSize();
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
int getRowBatchSize();
}
+ /**
+ * Protobuf type {@code BulkDeleteRequest}
+ */
public static final class BulkDeleteRequest extends
com.google.protobuf.GeneratedMessage
implements BulkDeleteRequestOrBuilder {
// Use BulkDeleteRequest.newBuilder() to construct.
- private BulkDeleteRequest(Builder builder) {
+ private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private BulkDeleteRequest(boolean noInit) {}
-
+ private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final BulkDeleteRequest defaultInstance;
public static BulkDeleteRequest getDefaultInstance() {
return defaultInstance;
}
-
+
public BulkDeleteRequest getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = scan_.toBuilder();
+ }
+ scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(scan_);
+ scan_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ deleteType_ = value;
+ }
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ timestamp_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ rowBatchSize_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
}
-
+
+ public static com.google.protobuf.Parser<BulkDeleteRequest> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteRequest>() {
+ public BulkDeleteRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteRequest> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code BulkDeleteRequest.DeleteType}
+ */
public enum DeleteType
implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * ROW = 0;
+ */
ROW(0, 0),
+ /**
+ * FAMILY = 1;
+ */
FAMILY(1, 1),
+ /**
+ * COLUMN = 2;
+ */
COLUMN(2, 2),
+ /**
+ * VERSION = 3;
+ */
VERSION(3, 3),
;
-
+
+ /**
+ * ROW = 0;
+ */
public static final int ROW_VALUE = 0;
+ /**
+ * FAMILY = 1;
+ */
public static final int FAMILY_VALUE = 1;
+ /**
+ * COLUMN = 2;
+ */
public static final int COLUMN_VALUE = 2;
+ /**
+ * VERSION = 3;
+ */
public static final int VERSION_VALUE = 3;
-
-
+
+
public final int getNumber() { return value; }
-
+
public static DeleteType valueOf(int value) {
switch (value) {
case 0: return ROW;
@@ -81,7 +231,7 @@ public final class BulkDeleteProtos {
default: return null;
}
}
-
+
public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
internalGetValueMap() {
return internalValueMap;
@@ -93,7 +243,7 @@ public final class BulkDeleteProtos {
return DeleteType.valueOf(number);
}
};
-
+
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
@@ -106,11 +256,9 @@ public final class BulkDeleteProtos {
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0);
}
-
- private static final DeleteType[] VALUES = {
- ROW, FAMILY, COLUMN, VERSION,
- };
-
+
+ private static final DeleteType[] VALUES = values();
+
public static DeleteType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
@@ -119,62 +267,89 @@ public final class BulkDeleteProtos {
}
return VALUES[desc.getIndex()];
}
-
+
private final int index;
private final int value;
-
+
private DeleteType(int index, int value) {
this.index = index;
this.value = value;
}
-
+
// @@protoc_insertion_point(enum_scope:BulkDeleteRequest.DeleteType)
}
-
+
private int bitField0_;
// required .Scan scan = 1;
public static final int SCAN_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ /**
+ * required .Scan scan = 1;
+ */
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required .Scan scan = 1;
+ */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
return scan_;
}
+ /**
+ * required .Scan scan = 1;
+ */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
return scan_;
}
-
+
// required .BulkDeleteRequest.DeleteType deleteType = 2;
public static final int DELETETYPE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_;
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
}
-
+
// optional uint64 timestamp = 3;
public static final int TIMESTAMP_FIELD_NUMBER = 3;
private long timestamp_;
+ /**
+ * optional uint64 timestamp = 3;
+ */
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
+ /**
+ * optional uint64 timestamp = 3;
+ */
public long getTimestamp() {
return timestamp_;
}
-
+
// required uint32 rowBatchSize = 4;
public static final int ROWBATCHSIZE_FIELD_NUMBER = 4;
private int rowBatchSize_;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public boolean hasRowBatchSize() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public int getRowBatchSize() {
return rowBatchSize_;
}
-
+
private void initFields() {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
@@ -185,7 +360,7 @@ public final class BulkDeleteProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasScan()) {
memoizedIsInitialized = 0;
return false;
@@ -205,7 +380,7 @@ public final class BulkDeleteProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -223,12 +398,12 @@ public final class BulkDeleteProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -250,14 +425,14 @@ public final class BulkDeleteProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -267,7 +442,7 @@ public final class BulkDeleteProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj;
-
+
boolean result = true;
result = result && (hasScan() == other.hasScan());
if (hasScan()) {
@@ -293,9 +468,13 @@ public final class BulkDeleteProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasScan()) {
@@ -315,89 +494,79 @@ public final class BulkDeleteProtos {
hash = (53 * hash) + getRowBatchSize();
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code BulkDeleteRequest}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder {
@@ -405,18 +574,21 @@ public final class BulkDeleteProtos {
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -428,7 +600,7 @@ public final class BulkDeleteProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
if (scanBuilder_ == null) {
@@ -445,20 +617,20 @@ public final class BulkDeleteProtos {
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor();
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial();
if (!result.isInitialized()) {
@@ -466,17 +638,7 @@ public final class BulkDeleteProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
int from_bitField0_ = bitField0_;
@@ -505,7 +667,7 @@ public final class BulkDeleteProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other);
@@ -514,7 +676,7 @@ public final class BulkDeleteProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this;
if (other.hasScan()) {
@@ -532,7 +694,7 @@ public final class BulkDeleteProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasScan()) {
@@ -552,73 +714,39 @@ public final class BulkDeleteProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder();
- if (hasScan()) {
- subBuilder.mergeFrom(getScan());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setScan(subBuilder.buildPartial());
- break;
- }
- case 16: {
- int rawValue = input.readEnum();
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue);
- if (value == null) {
- unknownFields.mergeVarintField(2, rawValue);
- } else {
- bitField0_ |= 0x00000002;
- deleteType_ = value;
- }
- break;
- }
- case 24: {
- bitField0_ |= 0x00000004;
- timestamp_ = input.readUInt64();
- break;
- }
- case 32: {
- bitField0_ |= 0x00000008;
- rowBatchSize_ = input.readUInt32();
- break;
- }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required .Scan scan = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ /**
+ * required .Scan scan = 1;
+ */
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required .Scan scan = 1;
+ */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
if (scanBuilder_ == null) {
return scan_;
@@ -626,6 +754,9 @@ public final class BulkDeleteProtos {
return scanBuilder_.getMessage();
}
}
+ /**
+ * required .Scan scan = 1;
+ */
public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
if (value == null) {
@@ -639,6 +770,9 @@ public final class BulkDeleteProtos {
bitField0_ |= 0x00000001;
return this;
}
+ /**
+ * required .Scan scan = 1;
+ */
public Builder setScan(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
if (scanBuilder_ == null) {
@@ -650,6 +784,9 @@ public final class BulkDeleteProtos {
bitField0_ |= 0x00000001;
return this;
}
+ /**
+ * required .Scan scan = 1;
+ */
public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -666,6 +803,9 @@ public final class BulkDeleteProtos {
bitField0_ |= 0x00000001;
return this;
}
+ /**
+ * required .Scan scan = 1;
+ */
public Builder clearScan() {
if (scanBuilder_ == null) {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
@@ -676,11 +816,17 @@ public final class BulkDeleteProtos {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
+ /**
+ * required .Scan scan = 1;
+ */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getScanFieldBuilder().getBuilder();
}
+ /**
+ * required .Scan scan = 1;
+ */
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
if (scanBuilder_ != null) {
return scanBuilder_.getMessageOrBuilder();
@@ -688,6 +834,9 @@ public final class BulkDeleteProtos {
return scan_;
}
}
+ /**
+ * required .Scan scan = 1;
+ */
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
getScanFieldBuilder() {
@@ -701,15 +850,24 @@ public final class BulkDeleteProtos {
}
return scanBuilder_;
}
-
+
// required .BulkDeleteRequest.DeleteType deleteType = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
}
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) {
if (value == null) {
throw new NullPointerException();
@@ -719,126 +877,249 @@ public final class BulkDeleteProtos {
onChanged();
return this;
}
+ /**
+ * required .BulkDeleteRequest.DeleteType deleteType = 2;
+ */
public Builder clearDeleteType() {
bitField0_ = (bitField0_ & ~0x00000002);
deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
onChanged();
return this;
}
-
+
// optional uint64 timestamp = 3;
private long timestamp_ ;
+ /**
+ * optional uint64 timestamp = 3;
+ */
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
+ /**
+ * optional uint64 timestamp = 3;
+ */
public long getTimestamp() {
return timestamp_;
}
+ /**
+ * optional uint64 timestamp = 3;
+ */
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000004;
timestamp_ = value;
onChanged();
return this;
}
+ /**
+ * optional uint64 timestamp = 3;
+ */
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
timestamp_ = 0L;
onChanged();
return this;
}
-
+
// required uint32 rowBatchSize = 4;
private int rowBatchSize_ ;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public boolean hasRowBatchSize() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public int getRowBatchSize() {
return rowBatchSize_;
}
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public Builder setRowBatchSize(int value) {
bitField0_ |= 0x00000008;
rowBatchSize_ = value;
onChanged();
return this;
}
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
public Builder clearRowBatchSize() {
bitField0_ = (bitField0_ & ~0x00000008);
rowBatchSize_ = 0;
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:BulkDeleteRequest)
}
-
+
static {
defaultInstance = new BulkDeleteRequest(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:BulkDeleteRequest)
}
-
+
public interface BulkDeleteResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required uint64 rowsDeleted = 1;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
boolean hasRowsDeleted();
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
long getRowsDeleted();
-
+
// optional uint64 versionsDeleted = 2;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
boolean hasVersionsDeleted();
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
long getVersionsDeleted();
}
+ /**
+ * Protobuf type {@code BulkDeleteResponse}
+ */
public static final class BulkDeleteResponse extends
com.google.protobuf.GeneratedMessage
implements BulkDeleteResponseOrBuilder {
// Use BulkDeleteResponse.newBuilder() to construct.
- private BulkDeleteResponse(Builder builder) {
+ private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private BulkDeleteResponse(boolean noInit) {}
-
+ private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final BulkDeleteResponse defaultInstance;
public static BulkDeleteResponse getDefaultInstance() {
return defaultInstance;
}
-
+
public BulkDeleteResponse getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ rowsDeleted_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ versionsDeleted_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
}
-
+
+ public static com.google.protobuf.Parser<BulkDeleteResponse> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteResponse>() {
+ public BulkDeleteResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteResponse> getParserForType() {
+ return PARSER;
+ }
+
private int bitField0_;
// required uint64 rowsDeleted = 1;
public static final int ROWSDELETED_FIELD_NUMBER = 1;
private long rowsDeleted_;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public boolean hasRowsDeleted() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public long getRowsDeleted() {
return rowsDeleted_;
}
-
+
// optional uint64 versionsDeleted = 2;
public static final int VERSIONSDELETED_FIELD_NUMBER = 2;
private long versionsDeleted_;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public boolean hasVersionsDeleted() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public long getVersionsDeleted() {
return versionsDeleted_;
}
-
+
private void initFields() {
rowsDeleted_ = 0L;
versionsDeleted_ = 0L;
@@ -847,7 +1128,7 @@ public final class BulkDeleteProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasRowsDeleted()) {
memoizedIsInitialized = 0;
return false;
@@ -855,7 +1136,7 @@ public final class BulkDeleteProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -867,12 +1148,12 @@ public final class BulkDeleteProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -886,14 +1167,14 @@ public final class BulkDeleteProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -903,7 +1184,7 @@ public final class BulkDeleteProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj;
-
+
boolean result = true;
result = result && (hasRowsDeleted() == other.hasRowsDeleted());
if (hasRowsDeleted()) {
@@ -919,9 +1200,13 @@ public final class BulkDeleteProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRowsDeleted()) {
@@ -933,89 +1218,79 @@ public final class BulkDeleteProtos {
hash = (53 * hash) + hashLong(getVersionsDeleted());
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code BulkDeleteResponse}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder {
@@ -1023,18 +1298,21 @@ public final class BulkDeleteProtos {
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -1045,7 +1323,7 @@ public final class BulkDeleteProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
rowsDeleted_ = 0L;
@@ -1054,20 +1332,20 @@ public final class BulkDeleteProtos {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDescriptor();
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial();
if (!result.isInitialized()) {
@@ -1075,17 +1353,7 @@ public final class BulkDeleteProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this);
int from_bitField0_ = bitField0_;
@@ -1102,7 +1370,7 @@ public final class BulkDeleteProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other);
@@ -1111,7 +1379,7 @@ public final class BulkDeleteProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this;
if (other.hasRowsDeleted()) {
@@ -1123,7 +1391,7 @@ public final class BulkDeleteProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasRowsDeleted()) {
@@ -1131,111 +1399,121 @@ public final class BulkDeleteProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 8: {
- bitField0_ |= 0x00000001;
- rowsDeleted_ = input.readUInt64();
- break;
- }
- case 16: {
- bitField0_ |= 0x00000002;
- versionsDeleted_ = input.readUInt64();
- break;
- }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required uint64 rowsDeleted = 1;
private long rowsDeleted_ ;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public boolean hasRowsDeleted() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public long getRowsDeleted() {
return rowsDeleted_;
}
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public Builder setRowsDeleted(long value) {
bitField0_ |= 0x00000001;
rowsDeleted_ = value;
onChanged();
return this;
}
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
public Builder clearRowsDeleted() {
bitField0_ = (bitField0_ & ~0x00000001);
rowsDeleted_ = 0L;
onChanged();
return this;
}
-
+
// optional uint64 versionsDeleted = 2;
private long versionsDeleted_ ;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public boolean hasVersionsDeleted() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public long getVersionsDeleted() {
return versionsDeleted_;
}
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public Builder setVersionsDeleted(long value) {
bitField0_ |= 0x00000002;
versionsDeleted_ = value;
onChanged();
return this;
}
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
public Builder clearVersionsDeleted() {
bitField0_ = (bitField0_ & ~0x00000002);
versionsDeleted_ = 0L;
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:BulkDeleteResponse)
}
-
+
static {
defaultInstance = new BulkDeleteResponse(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:BulkDeleteResponse)
}
-
+
+ /**
+ * Protobuf service {@code BulkDeleteService}
+ */
public static abstract class BulkDeleteService
implements com.google.protobuf.Service {
protected BulkDeleteService() {}
-
+
public interface Interface {
+ /**
+ * rpc delete(.BulkDeleteRequest) returns (.BulkDeleteResponse);
+ */
public abstract void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
-
+
}
-
+
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new BulkDeleteService() {
@@ -1246,10 +1524,10 @@ public final class BulkDeleteProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
impl.delete(controller, request, done);
}
-
+
};
}
-
+
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
@@ -1257,7 +1535,7 @@ public final class BulkDeleteProtos {
getDescriptorForType() {
return getDescriptor();
}
-
+
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@@ -1275,7 +1553,7 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1291,7 +1569,7 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1307,15 +1585,18 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
};
}
-
+
+ /**
+ * rpc delete(.BulkDeleteRequest) returns (.BulkDeleteResponse);
+ */
public abstract void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
-
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -1325,7 +1606,7 @@ public final class BulkDeleteProtos {
getDescriptorForType() {
return getDescriptor();
}
-
+
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@@ -1347,7 +1628,7 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1363,7 +1644,7 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -1379,23 +1660,23 @@ public final class BulkDeleteProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
-
+
public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
-
+
private final com.google.protobuf.RpcChannel channel;
-
+
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
-
+
public void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
@@ -1411,26 +1692,26 @@ public final class BulkDeleteProtos {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()));
}
}
-
+
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
-
+
public interface BlockingInterface {
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
throws com.google.protobuf.ServiceException;
}
-
+
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
-
+
private final com.google.protobuf.BlockingRpcChannel channel;
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
@@ -1441,10 +1722,12 @@ public final class BulkDeleteProtos {
request,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance());
}
-
+
}
+
+ // @@protoc_insertion_point(class_scope:BulkDeleteService)
}
-
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_BulkDeleteRequest_descriptor;
private static
@@ -1455,7 +1738,7 @@ public final class BulkDeleteProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkDeleteResponse_fieldAccessorTable;
-
+
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@@ -1487,17 +1770,13 @@ public final class BulkDeleteProtos {
internal_static_BulkDeleteRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteRequest_descriptor,
- new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", },
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class,
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
+ new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", });
internal_static_BulkDeleteResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_BulkDeleteResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteResponse_descriptor,
- new java.lang.String[] { "RowsDeleted", "VersionsDeleted", },
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
- org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
+ new java.lang.String[] { "RowsDeleted", "VersionsDeleted", });
return null;
}
};
@@ -1507,6 +1786,6 @@ public final class BulkDeleteProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
}
-
+
// @@protoc_insertion_point(outer_class_scope)
}
diff --git hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
index 123bf3b..a71e351 100644
--- hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
+++ hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
@@ -11,69 +11,130 @@ public final class ExampleProtos {
public interface CountRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
+ /**
+ * Protobuf type {@code CountRequest}
+ */
public static final class CountRequest extends
com.google.protobuf.GeneratedMessage
implements CountRequestOrBuilder {
// Use CountRequest.newBuilder() to construct.
- private CountRequest(Builder builder) {
+ private CountRequest(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private CountRequest(boolean noInit) {}
-
+ private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final CountRequest defaultInstance;
public static CountRequest getDefaultInstance() {
return defaultInstance;
}
-
+
public CountRequest getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private CountRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
-
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public CountRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new CountRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
+ }
+
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -83,101 +144,95 @@ public final class ExampleProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) obj;
-
+
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code CountRequest}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder {
@@ -185,18 +240,21 @@ public final class ExampleProtos {
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -207,25 +265,25 @@ public final class ExampleProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDescriptor();
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest build() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial();
if (!result.isInitialized()) {
@@ -233,23 +291,13 @@ public final class ExampleProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this);
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other);
@@ -258,102 +306,171 @@ public final class ExampleProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
-
+
// @@protoc_insertion_point(builder_scope:CountRequest)
}
-
+
static {
defaultInstance = new CountRequest(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:CountRequest)
}
-
+
public interface CountResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
-
+
// required int64 count = 1 [default = 0];
+ /**
+ * required int64 count = 1 [default = 0];
+ */
boolean hasCount();
+ /**
+ * required int64 count = 1 [default = 0];
+ */
long getCount();
}
+ /**
+ * Protobuf type {@code CountResponse}
+ */
public static final class CountResponse extends
com.google.protobuf.GeneratedMessage
implements CountResponseOrBuilder {
// Use CountResponse.newBuilder() to construct.
- private CountResponse(Builder builder) {
+ private CountResponse(com.google.protobuf.GeneratedMessage.Builder> builder) {
super(builder);
+ this.unknownFields = builder.getUnknownFields();
}
- private CountResponse(boolean noInit) {}
-
+ private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
private static final CountResponse defaultInstance;
public static CountResponse getDefaultInstance() {
return defaultInstance;
}
-
+
public CountResponse getDefaultInstanceForType() {
return defaultInstance;
}
-
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private CountResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ count_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser PARSER =
+ new com.google.protobuf.AbstractParser() {
+ public CountResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new CountResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser getParserForType() {
+ return PARSER;
}
-
+
private int bitField0_;
// required int64 count = 1 [default = 0];
public static final int COUNT_FIELD_NUMBER = 1;
private long count_;
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public boolean hasCount() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public long getCount() {
return count_;
}
-
+
private void initFields() {
count_ = 0L;
}
@@ -361,7 +478,7 @@ public final class ExampleProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
-
+
if (!hasCount()) {
memoizedIsInitialized = 0;
return false;
@@ -369,7 +486,7 @@ public final class ExampleProtos {
memoizedIsInitialized = 1;
return true;
}
-
+
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -378,12 +495,12 @@ public final class ExampleProtos {
}
getUnknownFields().writeTo(output);
}
-
+
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
-
+
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -393,14 +510,14 @@ public final class ExampleProtos {
memoizedSerializedSize = size;
return size;
}
-
+
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
-
+
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -410,7 +527,7 @@ public final class ExampleProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj;
-
+
boolean result = true;
result = result && (hasCount() == other.hasCount());
if (hasCount()) {
@@ -421,9 +538,13 @@ public final class ExampleProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
-
+
+ private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasCount()) {
@@ -431,89 +552,79 @@ public final class ExampleProtos {
hash = (53 * hash) + hashLong(getCount());
}
hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
return hash;
}
-
+
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
+ return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
+ return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
+ return PARSER.parseFrom(input, extensionRegistry);
}
-
+
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
-
+
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
+ /**
+ * Protobuf type {@code CountResponse}
+ */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder {
@@ -521,18 +632,21 @@ public final class ExampleProtos {
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
}
-
+
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable;
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
}
-
+
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
-
- private Builder(BuilderParent parent) {
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -543,27 +657,27 @@ public final class ExampleProtos {
private static Builder create() {
return new Builder();
}
-
+
public Builder clear() {
super.clear();
count_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
-
+
public Builder clone() {
return create().mergeFrom(buildPartial());
}
-
+
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDescriptor();
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance();
}
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse build() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial();
if (!result.isInitialized()) {
@@ -571,17 +685,7 @@ public final class ExampleProtos {
}
return result;
}
-
- private org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this);
int from_bitField0_ = bitField0_;
@@ -594,7 +698,7 @@ public final class ExampleProtos {
onBuilt();
return result;
}
-
+
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other);
@@ -603,7 +707,7 @@ public final class ExampleProtos {
return this;
}
}
-
+
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()) return this;
if (other.hasCount()) {
@@ -612,7 +716,7 @@ public final class ExampleProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
-
+
public final boolean isInitialized() {
if (!hasCount()) {
@@ -620,90 +724,96 @@ public final class ExampleProtos {
}
return true;
}
-
+
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 8: {
- bitField0_ |= 0x00000001;
- count_ = input.readInt64();
- break;
- }
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
}
}
+ return this;
}
-
private int bitField0_;
-
+
// required int64 count = 1 [default = 0];
private long count_ ;
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public boolean hasCount() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public long getCount() {
return count_;
}
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public Builder setCount(long value) {
bitField0_ |= 0x00000001;
count_ = value;
onChanged();
return this;
}
+ /**
+ * required int64 count = 1 [default = 0];
+ */
public Builder clearCount() {
bitField0_ = (bitField0_ & ~0x00000001);
count_ = 0L;
onChanged();
return this;
}
-
+
// @@protoc_insertion_point(builder_scope:CountResponse)
}
-
+
static {
defaultInstance = new CountResponse(true);
defaultInstance.initFields();
}
-
+
// @@protoc_insertion_point(class_scope:CountResponse)
}
-
+
+ /**
+ * Protobuf service {@code RowCountService}
+ */
public static abstract class RowCountService
implements com.google.protobuf.Service {
protected RowCountService() {}
-
+
public interface Interface {
+ /**
+ * rpc getRowCount(.CountRequest) returns (.CountResponse);
+ */
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback done);
-
+
+ /**
+ * rpc getKeyValueCount(.CountRequest) returns (.CountResponse);
+ */
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback done);
-
+
}
-
+
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new RowCountService() {
@@ -714,7 +824,7 @@ public final class ExampleProtos {
com.google.protobuf.RpcCallback done) {
impl.getRowCount(controller, request, done);
}
-
+
@java.lang.Override
public void getKeyValueCount(
com.google.protobuf.RpcController controller,
@@ -722,10 +832,10 @@ public final class ExampleProtos {
com.google.protobuf.RpcCallback done) {
impl.getKeyValueCount(controller, request, done);
}
-
+
};
}
-
+
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
@@ -733,7 +843,7 @@ public final class ExampleProtos {
getDescriptorForType() {
return getDescriptor();
}
-
+
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@@ -753,7 +863,7 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -771,7 +881,7 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -789,20 +899,26 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
};
}
-
+
+ /**
+ * rpc getRowCount(.CountRequest) returns (.CountResponse);
+ */
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback done);
-
+
+ /**
+ * rpc getKeyValueCount(.CountRequest) returns (.CountResponse);
+ */
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback done);
-
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -812,7 +928,7 @@ public final class ExampleProtos {
getDescriptorForType() {
return getDescriptor();
}
-
+
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@@ -839,7 +955,7 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -857,7 +973,7 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@@ -875,23 +991,23 @@ public final class ExampleProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
-
+
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
-
+
public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
-
+
private final com.google.protobuf.RpcChannel channel;
-
+
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
-
+
public void getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
@@ -906,7 +1022,7 @@ public final class ExampleProtos {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()));
}
-
+
public void getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
@@ -922,31 +1038,31 @@ public final class ExampleProtos {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()));
}
}
-
+
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
-
+
public interface BlockingInterface {
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
throws com.google.protobuf.ServiceException;
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
throws com.google.protobuf.ServiceException;
}
-
+
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
-
+
private final com.google.protobuf.BlockingRpcChannel channel;
-
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
@@ -957,8 +1073,8 @@ public final class ExampleProtos {
request,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
}
-
-
+
+
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
@@ -969,10 +1085,12 @@ public final class ExampleProtos {
request,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
}
-
+
}
+
+ // @@protoc_insertion_point(class_scope:RowCountService)
}
-
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CountRequest_descriptor;
private static
@@ -983,7 +1101,7 @@ public final class ExampleProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_CountResponse_fieldAccessorTable;
-
+
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@@ -1010,17 +1128,13 @@ public final class ExampleProtos {
internal_static_CountRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountRequest_descriptor,
- new java.lang.String[] { },
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class,
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
+ new java.lang.String[] { });
internal_static_CountResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_CountResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountResponse_descriptor,
- new java.lang.String[] { "Count", },
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class,
- org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
+ new java.lang.String[] { "Count", });
return null;
}
};
@@ -1029,6 +1143,6 @@ public final class ExampleProtos {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
-
+
// @@protoc_insertion_point(outer_class_scope)
}
diff --git hbase-protocol/README.txt hbase-protocol/README.txt
index b8e21ae..b67f0c3 100644
--- hbase-protocol/README.txt
+++ hbase-protocol/README.txt
@@ -7,28 +7,21 @@ the protobuf protoc tool is in your $PATH (You may need to download it and build
it first; its part of the protobuf package obtainable from here:
http://code.google.com/p/protobuf/downloads/list).
-Then run the following (You should be able to just copy and paste the below into a
-terminal and hit return -- the protoc compiler runs fast):
-
- UNIX_PROTO_DIR=src/main/protobuf
- JAVA_DIR=src/main/java/
- mkdir -p $JAVA_DIR 2> /dev/null
- if which cygpath 2> /dev/null; then
- PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
- JAVA_DIR=`cygpath --windows $JAVA_DIR`
- else
- PROTO_DIR=$UNIX_PROTO_DIR
- fi
- # uncomment the next line if you want to remove before generating
- # rm -fr $JAVA_DIR/org/apache/hadoop/hbase/protobuf/generated
- for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
- do
- protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
- done
+HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
+compile the protobuf definitions by invoking maven with the compile-protobuf profile or
+by passing in the compile-protobuf property.
+
+mvn compile -Dcompile-protobuf
+or
+mvn compile -Pcompile-protobuf
+
+You may also want to define protoc.path to point to the protoc binary:
+
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
+
+If you have added a new proto file, you should add it to the protoc includes in the
+module's pom.xml first. The other modules also support the same maven profile.
After you've done the above, check it in and then check it in (or post a patch
on a JIRA with your definition file changes and the generated files).
-Optionally, you can uncomment the hadoop-maven-plugins plugin in hbase-protocol/pom.xml.
-This plugin will generate for the classes during the build. Once again, you will need protocol buffers
-to be installed on your build machine (https://developers.google.com/protocol-buffers)
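
The README text above says a newly added .proto file must first be listed in the module's
pom.xml. A minimal sketch of the relevant fragment, assuming a hypothetical NewFeature.proto
is being added to the hbase-protocol compile-protobuf profile (element names follow the
hadoop-maven-plugins protoc goal as it appears elsewhere in this patch):

  <source>
    <directory>${basedir}/src/main/protobuf</directory>
    <includes>
      <!-- existing definitions stay listed here -->
      <include>Client.proto</include>
      <!-- hypothetical new definition; use the real file name from your change -->
      <include>NewFeature.proto</include>
    </includes>
  </source>

With the file listed, mvn compile -Pcompile-protobuf regenerates the checked-in Java classes for it.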
diff --git hbase-protocol/pom.xml hbase-protocol/pom.xml
index e81ee62..12f4b78 100644
--- hbase-protocol/pom.xml
+++ hbase-protocol/pom.xml
@@ -69,63 +69,6 @@
-
@@ -180,6 +123,67 @@
true
+
+ compile-protobuf
+
+
+ compile-protobuf
+
+
+
+
+
+ org.apache.hadoop
+ hadoop-maven-plugins
+
+
+ compile-protoc
+ generate-sources
+
+ protoc
+
+
+
+ ${basedir}/src/main/protobuf
+
+
+ ${basedir}/src/main/protobuf
+
+ AccessControl.proto
+ Admin.proto
+ Aggregate.proto
+ Authentication.proto
+ Cell.proto
+ Client.proto
+ ClusterId.proto
+ ClusterStatus.proto
+ Comparator.proto
+ ErrorHandling.proto
+ Filter.proto
+ FS.proto
+ HBase.proto
+ HFile.proto
+ LoadBalancer.proto
+ MapReduce.proto
+ Master.proto
+ MultiRowMutation.proto
+ RegionServerStatus.proto
+ RowProcessor.proto
+ RPC.proto
+ SecureBulkLoad.proto
+ Tracing.proto
+ WAL.proto
+ ZooKeeper.proto
+
+
+
+
+
+
+
+
+
+
+
-
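
The XML markup in the hbase-protocol/pom.xml hunk above did not survive extraction; only the
element values (the profile id, the plugin coordinates, the protobuf directories and the list of
.proto files) remain. A hedged reconstruction of the overall shape of the added compile-protobuf
profile, based on those values and on the usual wiring of the hadoop-maven-plugins protoc goal,
looks roughly like this (details such as the generated-source output location are assumptions):

  <profile>
    <id>compile-protobuf</id>
    <activation>
      <property>
        <name>compile-protobuf</name>
      </property>
    </activation>
    <build>
      <plugins>
        <plugin>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-maven-plugins</artifactId>
          <executions>
            <execution>
              <id>compile-protoc</id>
              <phase>generate-sources</phase>
              <goals>
                <goal>protoc</goal>
              </goals>
              <configuration>
                <imports>
                  <param>${basedir}/src/main/protobuf</param>
                </imports>
                <source>
                  <directory>${basedir}/src/main/protobuf</directory>
                  <includes>
                    <include>AccessControl.proto</include>
                    <!-- ... the remaining .proto files listed in the hunk above ... -->
                    <include>ZooKeeper.proto</include>
                  </includes>
                </source>
                <!-- an <output> element naming the generated-source destination is
                     likely present here as well, but was lost in this excerpt -->
              </configuration>
            </execution>
          </executions>
        </plugin>
      </plugins>
    </build>
  </profile>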
diff --git hbase-server/pom.xml hbase-server/pom.xml
index e66fc62..29f0215 100644
--- hbase-server/pom.xml
+++ hbase-server/pom.xml
@@ -685,5 +685,75 @@
+
+ compile-protobuf
+
+
+ compile-protobuf
+
+
+
+
+
+ org.apache.hadoop
+ hadoop-maven-plugins
+
+
+ compile-protoc
+ generate-sources
+
+ protoc
+
+
+
+ ${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
+
+
+
+ ${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
+
+ CellMessage.proto
+ CellSetMessage.proto
+ ColumnSchemaMessage.proto
+ ScannerMessage.proto
+ StorageClusterStatusMessage.proto
+ TableInfoMessage.proto
+ TableListMessage.proto
+ TableSchemaMessage.proto
+ VersionMessage.proto
+
+
+
+
+
+
+ compile-test-protoc
+ generate-test-sources
+
+ protoc
+
+
+
+ ${basedir}/src/test/protobuf
+
+
+ ${basedir}/src/test/protobuf
+
+ ColumnAggregationProtocol.proto
+ IncrementCounterProcessor.proto
+ PingProtocol.proto
+ test.proto
+ test_delayed_rpc.proto
+ test_rpc_service.proto
+
+
+
+
+
+
+
+
+
+
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
index b045cdd..f5f6a95 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
@@ -728,7 +728,7 @@ public final class ColumnSchemaMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
@@ -802,7 +802,7 @@ public final class ColumnSchemaMessage {
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
@@ -1394,7 +1394,7 @@ public final class ColumnSchemaMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
@@ -1790,7 +1790,7 @@ public final class ColumnSchemaMessage {
getCompressionBytes() {
java.lang.Object ref = compression_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
compression_ = b;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
index 493463a..37132ab 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
@@ -1113,7 +1113,7 @@ public final class ScannerMessage {
getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
filter_ = b;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
index 897a234..05ff7a3 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
@@ -2405,7 +2405,7 @@ public final class StorageClusterStatusMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
index e37bb56..421c0ec 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
@@ -806,7 +806,7 @@ public final class TableInfoMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
@@ -985,7 +985,7 @@ public final class TableInfoMessage {
getLocationBytes() {
java.lang.Object ref = location_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
location_ = b;
@@ -1449,7 +1449,7 @@ public final class TableInfoMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
index ecad722..88f9cd3 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
@@ -744,7 +744,7 @@ public final class TableSchemaMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
@@ -818,7 +818,7 @@ public final class TableSchemaMessage {
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
@@ -1446,7 +1446,7 @@ public final class TableSchemaMessage {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
index a725f32..590b0d3 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
@@ -751,7 +751,7 @@ public final class VersionMessage {
getRestVersionBytes() {
java.lang.Object ref = restVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
restVersion_ = b;
@@ -825,7 +825,7 @@ public final class VersionMessage {
getJvmVersionBytes() {
java.lang.Object ref = jvmVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jvmVersion_ = b;
@@ -899,7 +899,7 @@ public final class VersionMessage {
getOsVersionBytes() {
java.lang.Object ref = osVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
osVersion_ = b;
@@ -973,7 +973,7 @@ public final class VersionMessage {
getServerVersionBytes() {
java.lang.Object ref = serverVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serverVersion_ = b;
@@ -1047,7 +1047,7 @@ public final class VersionMessage {
getJerseyVersionBytes() {
java.lang.Object ref = jerseyVersion_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
jerseyVersion_ = b;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java
index 79cc5ce..57903cd 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java
@@ -807,7 +807,7 @@ public final class PingProtos {
getPongBytes() {
java.lang.Object ref = pong_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pong_ = b;
@@ -2980,7 +2980,7 @@ public final class PingProtos {
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
@@ -3489,7 +3489,7 @@ public final class PingProtos {
getResponseBytes() {
java.lang.Object ref = response_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
response_ = b;
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java
index 4e94e61..3da0254 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java
@@ -1145,7 +1145,7 @@ public final class TestProtos {
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
@@ -1662,7 +1662,7 @@ public final class TestProtos {
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
diff --git pom.xml pom.xml
index a363696..565700a 100644
--- pom.xml
+++ pom.xml
@@ -690,6 +690,15 @@
false
+
+ org.apache.hadoop
+ hadoop-maven-plugins
+ ${hadoop-two.version}
+
+ ${protobuf.version}
+ ${protoc.path}
+
+
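
As with the module poms, the markup for the root pom.xml pluginManagement entry above was
stripped; only the values (org.apache.hadoop, hadoop-maven-plugins, ${hadoop-two.version},
${protobuf.version}, ${protoc.path}) survive. A sketch of what that entry presumably looks like;
the protocVersion/protocCommand parameter names are assumptions taken from how the same plugin
is configured in the Hadoop poms:

  <plugin>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-maven-plugins</artifactId>
    <version>${hadoop-two.version}</version>
    <configuration>
      <!-- assumed parameter names for the protoc goal -->
      <protocVersion>${protobuf.version}</protocVersion>
      <protocCommand>${protoc.path}</protocCommand>
    </configuration>
  </plugin>

Declaring the version and protoc configuration once here lets each module's compile-protobuf
profile reference the plugin without repeating them.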
diff --git src/main/docbkx/developer.xml src/main/docbkx/developer.xml
index 5402acf..268a0b9 100644
--- src/main/docbkx/developer.xml
+++ src/main/docbkx/developer.xml
@@ -165,11 +165,21 @@ mvn clean package -DskipTests
Build Protobuf
- You may need to change the protobuf definitions that reside in the hbase-protocol module.
+ You may need to change the protobuf definitions that reside in the hbase-protocol module or other modules.
The protobuf files are located in hbase-protocol/src/main/protobuf.
- For the change to be effective, you will need to
- regenerate the classes (read the hbase-protocol/README.txt for more details).
+ For the change to be effective, you will need to regenerate the classes. You can use the maven profile compile-protobuf to do this.
+
+mvn compile -Dcompile-protobuf
+or
+mvn compile -Pcompile-protobuf
+
+
+You may also want to define protoc.path to point to the protoc binary:
+
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
+
+ Read the hbase-protocol/README.txt for more details.