diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index db740bc..d899426 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -11303,6 +11303,685 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:NameInt64Pair) } + public interface EmptyMsgOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class EmptyMsg extends + com.google.protobuf.GeneratedMessage + implements EmptyMsgOrBuilder { + // Use EmptyMsg.newBuilder() to construct. + private EmptyMsg(Builder builder) { + super(builder); + } + private EmptyMsg(boolean noInit) {} + + private static final EmptyMsg defaultInstance; + public static EmptyMsg getDefaultInstance() { + return defaultInstance; + } + + public EmptyMsg getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsgOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_EmptyMsg_fieldAccessorTable; + } + + // Construct using 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:EmptyMsg) + } + + static { + defaultInstance = new EmptyMsg(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:EmptyMsg) + } + + public interface LongMsgOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int64 longMsg = 1; + 
boolean hasLongMsg(); + long getLongMsg(); + } + public static final class LongMsg extends + com.google.protobuf.GeneratedMessage + implements LongMsgOrBuilder { + // Use LongMsg.newBuilder() to construct. + private LongMsg(Builder builder) { + super(builder); + } + private LongMsg(boolean noInit) {} + + private static final LongMsg defaultInstance; + public static LongMsg getDefaultInstance() { + return defaultInstance; + } + + public LongMsg getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable; + } + + private int bitField0_; + // required int64 longMsg = 1; + public static final int LONGMSG_FIELD_NUMBER = 1; + private long longMsg_; + public boolean hasLongMsg() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLongMsg() { + return longMsg_; + } + + private void initFields() { + longMsg_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLongMsg()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, longMsg_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, longMsg_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) obj; + + boolean result = true; + result = result && (hasLongMsg() == other.hasLongMsg()); + if (hasLongMsg()) { + result = result && (getLongMsg() + == other.getLongMsg()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLongMsg()) { + hash = (37 * hash) + LONGMSG_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLongMsg()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsgOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_LongMsg_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + longMsg_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.longMsg_ = longMsg_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.getDefaultInstance()) return this; + if (other.hasLongMsg()) { + setLongMsg(other.getLongMsg()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLongMsg()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + 
com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + longMsg_ = input.readInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required int64 longMsg = 1; + private long longMsg_ ; + public boolean hasLongMsg() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLongMsg() { + return longMsg_; + } + public Builder setLongMsg(long value) { + bitField0_ |= 0x00000001; + longMsg_ = value; + onChanged(); + return this; + } + public Builder clearLongMsg() { + bitField0_ = (bitField0_ & ~0x00000001); + longMsg_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LongMsg) + } + + static { + defaultInstance = new LongMsg(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LongMsg) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_TableSchema_descriptor; private static @@ -11388,6 +12067,16 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_NameInt64Pair_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_EmptyMsg_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_EmptyMsg_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LongMsg_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LongMsg_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -11440,13 +12129,14 @@ public final class HBaseProtos { "value\030\002 \002(\t\",\n\rNameBytesPair\022\014\n\004name\030\001 \002" + "(\t\022\r\n\005value\030\002 \001(\014\"/\n\016BytesBytesPair\022\r\n\005f" + "irst\030\001 \002(\014\022\016\n\006second\030\002 \002(\014\",\n\rNameInt64P" + - "air\022\014\n\004name\030\001 \001(\t\022\r\n\005value\030\002 \001(\003*r\n\013Comp" + + "air\022\014\n\004name\030\001 \001(\t\022\r\n\005value\030\002 \001(\003\"\n\n\010Empt" + + "yMsg\"\032\n\007LongMsg\022\017\n\007longMsg\030\001 \002(\003*r\n\013Comp" + "areType\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005" + "EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQU" + "AL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*_\n\007KeyType\022" + "\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDEL" + - "ETE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIM" + - "UM\020\377\001B>\n*org.apache.hadoop.hbase.protobu", + "ETE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIM", + "UM\020\377\001B>\n*org.apache.hadoop.hbase.protobu" + "f.generatedB\013HBaseProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = @@ -11590,6 +12280,22 @@ public final class HBaseProtos { new java.lang.String[] { "Name", "Value", }, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); + internal_static_EmptyMsg_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_EmptyMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_EmptyMsg_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg.Builder.class); + internal_static_LongMsg_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_LongMsg_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LongMsg_descriptor, + new java.lang.String[] { "LongMsg", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg.Builder.class); return null; } }; diff --git a/hbase-protocol/src/main/protobuf/hbase.proto b/hbase-protocol/src/main/protobuf/hbase.proto index 1ff2a4d..197a719 100644 --- a/hbase-protocol/src/main/protobuf/hbase.proto +++ b/hbase-protocol/src/main/protobuf/hbase.proto @@ -268,3 +268,10 @@ message NameInt64Pair { optional string name = 1; optional int64 value = 2; } + +message EmptyMsg { +} + +message LongMsg { + required int64 longMsg = 1; +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 2e0c05e..272c0fc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -20,6 +20,10 @@ package org.apache.hadoop.hbase.client.coprocessor; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; @@ -51,6 +55,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import com.google.protobuf.ByteString; +import com.google.protobuf.Message; /** * This client class is for invoking the aggregate functions deployed on the @@ -98,7 +103,8 @@ public class AggregationClient { * The caller is supposed to handle the exception as they are thrown * & propagated to it. 
*/ - public R max(final byte[] tableName, final ColumnInterpreter ci, + public + R max(final byte[] tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); class MaxCallBack implements Batch.Callback { @@ -130,9 +136,9 @@ public class AggregationClient { throw controller.getFailedOn(); } if (response.getFirstPartCount() > 0) { - return ci.castToCellType( - ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(0)))); + ByteString b = response.getFirstPart(0); + Q q = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 3, b); + return ci.getCellValueFromProto(q); } return null; } @@ -168,7 +174,8 @@ public class AggregationClient { * @return min val * @throws Throwable */ - public R min(final byte[] tableName, final ColumnInterpreter ci, + public + R min(final byte[] tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); class MinCallBack implements Batch.Callback { @@ -202,9 +209,9 @@ public class AggregationClient { throw controller.getFailedOn(); } if (response.getFirstPartCount() > 0) { - return ci.castToCellType( - ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(0)))); + ByteString b = response.getFirstPart(0); + Q q = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 3, b); + return ci.getCellValueFromProto(q); } return null; } @@ -231,8 +238,9 @@ public class AggregationClient { * @return * @throws Throwable */ - public long rowCount(final byte[] tableName, - final ColumnInterpreter ci, final Scan scan) throws Throwable { + public + long rowCount(final byte[] tableName, + final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); class RowNumCallback implements Batch.Callback { private final AtomicLong rowCountL = new AtomicLong(0); @@ -285,7 +293,8 @@ public class AggregationClient { * @return sum * @throws Throwable */ - public S sum(final byte[] tableName, final ColumnInterpreter ci, + public + S sum(final byte[] tableName, final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); @@ -320,8 +329,10 @@ public class AggregationClient { if (response.getFirstPartCount() == 0) { return null; } - return ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(0))); + ByteString b = response.getFirstPart(0); + T t = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, b); + S s = ci.getPromotedValueFromProto(t); + return s; } }, sumCallBack); } finally { @@ -340,8 +351,9 @@ public class AggregationClient { * @param scan * @throws Throwable */ - private Pair getAvgArgs(final byte[] tableName, - final ColumnInterpreter ci, final Scan scan) throws Throwable { + private + Pair getAvgArgs(final byte[] tableName, + final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); class AvgCallBack implements Batch.Callback> { S sum = null; @@ -379,8 +391,10 @@ public class AggregationClient { if (response.getFirstPartCount() == 0) { return pair; } - pair.setFirst(ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(0)))); + ByteString b = response.getFirstPart(0); + T t = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, b); + S s = ci.getPromotedValueFromProto(t); + pair.setFirst(s); ByteBuffer bb = 
ByteBuffer.allocate(8).put( getBytesFromResponse(response.getSecondPart())); bb.rewind(); @@ -408,8 +422,9 @@ public class AggregationClient { * @return * @throws Throwable */ - public double avg(final byte[] tableName, - final ColumnInterpreter ci, Scan scan) throws Throwable { + public + double avg(final byte[] tableName, + final ColumnInterpreter ci, Scan scan) throws Throwable { Pair p = getAvgArgs(tableName, ci, scan); return ci.divideForAvg(p.getFirst(), p.getSecond()); } @@ -425,8 +440,9 @@ public class AggregationClient { * @return * @throws Throwable */ - private Pair, Long> getStdArgs(final byte[] tableName, - final ColumnInterpreter ci, final Scan scan) throws Throwable { + private + Pair, Long> getStdArgs(final byte[] tableName, + final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); class StdCallback implements Batch.Callback, Long>> { long rowCountVal = 0l; @@ -474,8 +490,10 @@ public class AggregationClient { } List list = new ArrayList(); for (int i = 0; i < response.getFirstPartCount(); i++) { - list.add(ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(i)))); + ByteString b = response.getFirstPart(i); + T t = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, b); + S s = ci.getPromotedValueFromProto(t); + list.add(s); } pair.setFirst(list); ByteBuffer bb = ByteBuffer.allocate(8).put( @@ -505,7 +523,8 @@ public class AggregationClient { * @return * @throws Throwable */ - public double std(final byte[] tableName, ColumnInterpreter ci, + public + double std(final byte[] tableName, ColumnInterpreter ci, Scan scan) throws Throwable { Pair, Long> p = getStdArgs(tableName, ci, scan); double res = 0d; @@ -528,9 +547,10 @@ public class AggregationClient { * (sum of values, sum of weights) for all the regions chosen * @throws Throwable */ - private Pair>, List> + private + Pair>, List> getMedianArgs(final byte[] tableName, - final ColumnInterpreter ci, final Scan scan) throws Throwable { + final ColumnInterpreter ci, final Scan scan) throws Throwable { final AggregateArgument requestArg = validateArgAndGetPB(scan, ci); final NavigableMap> map = new TreeMap>(Bytes.BYTES_COMPARATOR); @@ -572,8 +592,10 @@ public class AggregationClient { List list = new ArrayList(); for (int i = 0; i < response.getFirstPartCount(); i++) { - list.add(ci.parseResponseAsPromotedType( - getBytesFromResponse(response.getFirstPart(i)))); + ByteString b = response.getFirstPart(i); + T t = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, b); + S s = ci.getPromotedValueFromProto(t); + list.add(s); } return list; } @@ -597,7 +619,8 @@ public class AggregationClient { * @return R the median * @throws Throwable */ - public R median(final byte[] tableName, ColumnInterpreter ci, + public + R median(final byte[] tableName, ColumnInterpreter ci, Scan scan) throws Throwable { Pair>, List> p = getMedianArgs(tableName, ci, scan); byte[] startRow = null; @@ -672,16 +695,17 @@ public class AggregationClient { return null; } - AggregateArgument validateArgAndGetPB(Scan scan, ColumnInterpreter ci) + AggregateArgument + validateArgAndGetPB(Scan scan, ColumnInterpreter ci) throws IOException { validateParameters(scan); final AggregateArgument.Builder requestBuilder = AggregateArgument.newBuilder(); requestBuilder.setInterpreterClassName(ci.getClass().getCanonicalName()); - ByteString columnInterpreterSpecificData = null; - if ((columnInterpreterSpecificData = ci.columnInterpreterSpecificData()) + P 
columnInterpreterSpecificData = null;
+    if ((columnInterpreterSpecificData = ci.getRequestData())
         != null) {
-      requestBuilder.setInterpreterSpecificBytes(columnInterpreterSpecificData);
+      requestBuilder.setInterpreterSpecificBytes(columnInterpreterSpecificData.toByteString());
     }
     requestBuilder.setScan(ProtobufUtil.toScan(scan));
     return requestBuilder.build();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index 404a3b4..3e86af7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -19,16 +19,15 @@
 package org.apache.hadoop.hbase.client.coprocessor;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.google.protobuf.ByteString;
-
 /**
  * a concrete column interpreter implementation. The cell value is a Long value
  * and its promoted data type is also a Long value. For computing aggregation
@@ -39,7 +38,8 @@ import com.google.protobuf.ByteString;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class LongColumnInterpreter implements ColumnInterpreter<Long, Long> {
+public class LongColumnInterpreter extends ColumnInterpreter<Long, Long,
+    EmptyMsg, LongMsg, LongMsg> {
 
   public Long getValue(byte[] colFamily, byte[] colQualifier, KeyValue kv)
       throws IOException {
@@ -97,45 +97,40 @@ public class LongColumnInterpreter
     return o;
   }
 
-  @Override
-  public Long parseResponseAsPromotedType(byte[] response) {
-    ByteBuffer b = ByteBuffer.allocate(8).put(response);
-    b.rewind();
-    long l = b.getLong();
+  public Long castToCellType(Long l) {
     return l;
   }
 
   @Override
-  public Long castToCellType(Long l) {
-    return l;
+  public EmptyMsg getRequestData() {
+    return EmptyMsg.getDefaultInstance();
   }
 
   @Override
-  public ByteString columnInterpreterSpecificData() {
-    // nothing
-    return null;
+  public void initialize(EmptyMsg msg) {
+    //nothing
   }
 
   @Override
-  public void initialize(ByteString bytes) {
-    // nothing
+  public LongMsg getProtoForCellType(Long t) {
+    LongMsg.Builder builder = LongMsg.newBuilder();
+    return builder.setLongMsg(t).build();
   }
 
   @Override
-  public ByteString getProtoForCellType(Long t) {
-    return getProtoForPromotedOrCellType(t);
+  public LongMsg getProtoForPromotedType(Long s) {
+    LongMsg.Builder builder = LongMsg.newBuilder();
+    return builder.setLongMsg(s).build();
   }
 
   @Override
-  public ByteString getProtoForPromotedType(Long s) {
-    return getProtoForPromotedOrCellType(s);
+  public Long getPromotedValueFromProto(LongMsg r) {
+    return r.getLongMsg();
  }
 
-  private ByteString getProtoForPromotedOrCellType(Long s) {
-    ByteBuffer bb = ByteBuffer.allocate(8).putLong(s);
-    bb.rewind();
-    ByteString bs = ByteString.copyFrom(bb);
-    return bs;
+  @Override
+  public Long getCellValueFromProto(LongMsg q) {
+    return q.getLongMsg();
   }
 }
\ No newline at end of file
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index cd7cd3a..342c248 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -19,6 +19,10 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
@@ -42,6 +46,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
@@ -49,13 +54,17 @@ import com.google.protobuf.Service;
 /**
  * A concrete AggregateProtocol implementation. Its system level coprocessor
  * that computes the aggregate function at a region level.
+ * {@link ColumnInterpreter} has the explanation for the params.
  * @param <T>
 * @param <S>
+ * @param <P>
+ * @param <Q>
+ * @param <R>
 */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class AggregateImplementation<T, S> extends AggregateService implements
-    CoprocessorService, Coprocessor {
+public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
+extends AggregateService implements CoprocessorService, Coprocessor {
   protected static Log log = LogFactory.getLog(AggregateImplementation.class);
   private RegionCoprocessorEnvironment env;
@@ -73,7 +82,7 @@
     AggregateResponse response = null;
     T max = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -96,7 +105,7 @@
       } while (hasMoreRows);
       if (max != null) {
         AggregateResponse.Builder builder = AggregateResponse.newBuilder();
-        builder.addFirstPart(ci.getProtoForCellType(max));
+        builder.addFirstPart(ci.getProtoForCellType(max).toByteString());
         response = builder.build();
       }
     } catch (IOException e) {
@@ -127,7 +136,7 @@
     InternalScanner scanner = null;
     T min = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -149,7 +158,7 @@
       } while (hasMoreRows);
       if (min != null) {
         response = AggregateResponse.newBuilder().addFirstPart(
-          ci.getProtoForCellType(min)).build();
+          ci.getProtoForCellType(min).toByteString()).build();
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
@@ -179,7 +188,7 @@
     InternalScanner scanner = null;
     long sum = 0l;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       S sumVal = null;
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -203,7 +212,7 @@
       } while (hasMoreRows);
       if (sumVal != null) {
         response = AggregateResponse.newBuilder().addFirstPart(
-          ci.getProtoForPromotedType(sumVal)).build();
+          ci.getProtoForPromotedType(sumVal).toByteString()).build();
       }
     } catch (IOException e) {
       ResponseConverter.setControllerException(controller, e);
@@ -287,7 +296,7 @@
     AggregateResponse response = null;
     InternalScanner scanner = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       S sumVal = null;
       Long rowCountVal = 0l;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -311,7 +320,7 @@
         rowCountVal++;
       } while (hasMoreRows);
       if (sumVal != null) {
-        ByteString first = ci.getProtoForPromotedType(sumVal);
+        ByteString first = ci.getProtoForPromotedType(sumVal).toByteString();
         AggregateResponse.Builder pair = AggregateResponse.newBuilder();
         pair.addFirstPart(first);
         ByteBuffer bb = ByteBuffer.allocate(8).putLong(rowCountVal);
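The hunks above show the core move of this patch: responses now carry serialized PB messages (via getProtoForCellType / getProtoForPromotedType) instead of hand-rolled ByteBuffers, and the receiving side recovers the concrete message class from the interpreter's generic signature. Below is a minimal, self-contained sketch of that reflection technique, which ProtobufUtil.getParsedGenericInstance(), added further down in this patch, builds on. The sketch is illustrative only: the class name GenericParseSketch and the sample value 42 are assumptions, not part of the patch.

import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;

import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;

import com.google.protobuf.ByteString;
import com.google.protobuf.Message;

public class GenericParseSketch {
  public static void main(String[] args) throws Exception {
    // A LongMsg serialized the way an AggregateResponse carries it in firstPart.
    ByteString wire = LongMsg.newBuilder().setLongMsg(42L).build().toByteString();

    // ColumnInterpreter<T, S, P, Q, R>: position 3 is Q, the cell-type message.
    // LongColumnInterpreter extends ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg>,
    // so the actual type argument at position 3 is LongMsg.
    ParameterizedType superType =
        (ParameterizedType) LongColumnInterpreter.class.getGenericSuperclass();
    Class<?> qType = (Class<?>) superType.getActualTypeArguments()[3];

    // Every generated protobuf message class exposes a static parseFrom(ByteString).
    Method parseFrom = qType.getMethod("parseFrom", ByteString.class);
    Message parsed = (Message) parseFrom.invoke(null, wire);

    System.out.println(((LongMsg) parsed).getLongMsg()); // prints 42
  }
}

This works because the runtime type argument is the only thing needed to decode the wire bytes; the same trick is used client-side (positions 3 and 4 in AggregationClient) and server-side (position 2, the request message P, in constructColumnInterpreterFromRequest below).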
@@ -346,7 +355,7 @@ public class AggregateImplementation<T, S> extends AggregateService implements
     InternalScanner scanner = null;
     AggregateResponse response = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       S sumVal = null, sumSqVal = null, tempVal = null;
       long rowCountVal = 0l;
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -374,8 +383,8 @@ public class AggregateImplementation<T, S> extends AggregateService implements
         rowCountVal++;
       } while (hasMoreRows);
       if (sumVal != null) {
-        ByteString first_sumVal = ci.getProtoForPromotedType(sumVal);
-        ByteString first_sumSqVal = ci.getProtoForPromotedType(sumSqVal);
+        ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
+        ByteString first_sumSqVal = ci.getProtoForPromotedType(sumSqVal).toByteString();
         AggregateResponse.Builder pair = AggregateResponse.newBuilder();
         pair.addFirstPart(first_sumVal);
         pair.addFirstPart(first_sumSqVal);
@@ -410,7 +419,7 @@ public class AggregateImplementation<T, S> extends AggregateService implements
     AggregateResponse response = null;
     InternalScanner scanner = null;
     try {
-      ColumnInterpreter<T, S> ci = constructColumnInterpreterFromRequest(request);
+      ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
       S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
@@ -442,9 +451,9 @@ public class AggregateImplementation<T, S> extends AggregateService implements
         sumVal = ci.add(sumVal, tempVal);
         sumWeights = ci.add(sumWeights, tempWeight);
       } while (hasMoreRows);
-      ByteString first_sumVal = ci.getProtoForPromotedType(sumVal);
+      ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
       S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
-      ByteString first_sumWeights = ci.getProtoForPromotedType(s);
+      ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
       AggregateResponse.Builder pair = AggregateResponse.newBuilder();
       pair.addFirstPart(first_sumVal);
       pair.addFirstPart(first_sumWeights);
@@ -462,15 +471,17 @@ public class AggregateImplementation<T, S> extends AggregateService implements
   }
 
   @SuppressWarnings("unchecked")
-  ColumnInterpreter<T, S> constructColumnInterpreterFromRequest(
+  ColumnInterpreter<T, S, P, Q, R> constructColumnInterpreterFromRequest(
       AggregateArgument request) throws IOException {
     String className = request.getInterpreterClassName();
     Class<?> cls;
     try {
       cls = Class.forName(className);
-      ColumnInterpreter<T, S> ci = (ColumnInterpreter<T, S>) cls.newInstance();
+      ColumnInterpreter<T, S, P, Q, R> ci = (ColumnInterpreter<T, S, P, Q, R>) cls.newInstance();
       if (request.hasInterpreterSpecificBytes()) {
-        ci.initialize(request.getInterpreterSpecificBytes());
+        ByteString b = request.getInterpreterSpecificBytes();
+        P initMsg = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 2, b);
+        ci.initialize(initMsg);
       }
       return ci;
     } catch (ClassNotFoundException e) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
index d74929c..cc56c26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
 
 /**
  * Defines how value for specific column is interpreted and provides utility
@@ -46,10 +47,14 @@
  * returns a type.
 * @param <T> Cell value data type
 * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (<T>) instance
+ * @param <R> PB message that is used to transport Promoted (<S>) instance
 */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface ColumnInterpreter<T, S> {
+public abstract class ColumnInterpreter<T, S, P extends Message,
+    Q extends Message, R extends Message> {
 
   /**
    * @param colFamily
@@ -58,7 +63,7 @@
    * @param colQualifier
    * @param kv
    * @return value of type T
    * @throws IOException
    */
-  T getValue(byte[] colFamily, byte[] colQualifier, KeyValue kv)
+  public abstract T getValue(byte[] colFamily, byte[] colQualifier, KeyValue kv)
     throws IOException;
 
   /**
@@ -67,36 +72,36 @@
    * @param l1
    * @param l2
    * @return sum or non null value among (if either of them is null); otherwise
    * returns a null.
    */
-  public S add(S l1, S l2);
+  public abstract S add(S l1, S l2);
 
   /**
    * returns the maximum value for this type T
    * @return max
    */
-  T getMaxValue();
+  public abstract T getMaxValue();
 
-  T getMinValue();
+  public abstract T getMinValue();
 
   /**
    * @param o1
    * @param o2
    * @return multiplication
    */
-  S multiply(S o1, S o2);
+  public abstract S multiply(S o1, S o2);
 
   /**
    * @param o
    * @return increment
    */
-  S increment(S o);
+  public abstract S increment(S o);
 
   /**
    * provides casting opportunity between the data types.
    * @param o
    * @return cast
    */
-  S castToReturnType(T o);
+  public abstract S castToReturnType(T o);
 
   /**
    * This takes care if either of arguments are null. returns 0 if they are
@@ -105,7 +110,7 @@
    * equal or both are null;
    * <li>&gt; 0 if l1 &gt; l2 or l1 is not null and l2 is null.
    * <li>&lt; 0 if l1 &lt; l2 or l1 is null and l2 is not null.
    */
-  int compare(final T l1, final T l2);
+  public abstract int compare(final T l1, final T l2);
 
   /**
    * used for computing average of <S> data values. Not providing the divide
@@ -114,7 +119,7 @@
    * method that takes two <S> values as it is not needed as of now.
    * @param o
    * @param l
    * @return Average
    */
-  double divideForAvg(S o, Long l);
+  public abstract double divideForAvg(S o, Long l);
 
   /**
    * This method should return any additional data that is needed on the
@@ -124,41 +129,48 @@
    * {@link LongColumnInterpreter}) then null should be returned.
    * @return the PB message
    */
-  ByteString columnInterpreterSpecificData();
+  public abstract P getRequestData();
 
   /**
-   * Return the PB for type T
+   * This method should initialize any field(s) of the ColumnInterpreter with
+   * a parsing of the passed message bytes (used on the server side).
+   * @param msg
+   */
+  public abstract void initialize(P msg);
+
+  /**
+   * This method gets the PB message corresponding to the cell type
    * @param t
-   * @return PB-message
+   * @return the PB message for the cell-type instance
    */
-  ByteString getProtoForCellType(T t);
+  public abstract Q getProtoForCellType(T t);
 
   /**
-   * Return the PB for type S
-   * @param s
-   * @return PB-message
+   * This method gets the cell-type instance from the PB message
+   * @param q
+   * @return the cell-type instance from the PB message
   */
-  ByteString getProtoForPromotedType(S s);
+  public abstract T getCellValueFromProto(Q q);
 
   /**
-   * This method should initialize any field(s) of the ColumnInterpreter with
-   * a parsing of the passed message bytes (used on the server side).
-   * @param bytes
+   * This method gets the PB message corresponding to the promoted type
+   * @param s
+   * @return the PB message for the promoted-type instance
   */
-  void initialize(ByteString bytes);
-
+  public abstract R getProtoForPromotedType(S s);
+
   /**
-   * Converts the bytes in the server's response to the expected type S
-   * @param response
-   * @return response of type S constructed from the message
+   * This method gets the promoted type from the proto message
+   * @param r
+   * @return the promoted-type instance from the PB message
   */
-  S parseResponseAsPromotedType(byte[] response);
-
+  public abstract S getPromotedValueFromProto(R r);
+
   /**
    * The response message comes as type S. This will convert/cast it to T.
   * In some sense, performs the opposite of {@link #castToReturnType(Object)}
   * @param response
   * @return cast
   */
-  T castToCellType(S response);
+  public abstract T castToCellType(S response);
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 01d8428..a18cbba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -27,7 +27,10 @@
 import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -1917,4 +1920,37 @@ public final class ProtobufUtil {
         KeyValue.Type.codeToType((byte)kv.getKeyType().getNumber()),
         kv.getValue().toByteArray());
   }
+
+  /**
+   * Get an instance of the generic argument passed to a class
+   * @param runtimeClass the runtime type of the class
+   * @param position the position of the argument in the class declaration
+   * @param b the ByteString which should be parsed to get the instance created
+   * @return the parsed generic instance
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  public static <T extends Message>
+  T getParsedGenericInstance(Class<?> runtimeClass, int position, ByteString b)
+      throws IOException {
+    Type type = runtimeClass.getGenericSuperclass();
+    Type argType = ((ParameterizedType)type).getActualTypeArguments()[position];
+    Class<T> classType = (Class<T>)argType;
+    T inst;
+    try {
+      Method m = classType.getMethod("parseFrom", ByteString.class);
+      inst = (T)m.invoke(null, b);
+      return inst;
+    } catch (SecurityException e) {
+      throw new IOException(e);
+    } catch (NoSuchMethodException e) {
+      throw new IOException(e);
+    } catch (IllegalArgumentException e) {
+      throw new IOException(e);
+    } catch (InvocationTargetException e) {
+      throw new IOException(e);
+    } catch (IllegalAccessException e) {
+      throw new IOException(e);
+    }
+  }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
index 7d735cc..cbcad2c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
@@ -31,6 +31,8 @@
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -131,7 +133,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long median = aClient.median(TEST_TABLE, ci, scan);
     assertEquals(8L, median);
@@ -153,7 +156,8 @@ public class
TestAggregateProtocol { scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); scan.setStartRow(ROWS[2]); scan.setStopRow(ROWS[14]); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = aClient.rowCount(TEST_TABLE, ci, scan); assertEquals(12, rowCount); } @@ -168,7 +172,8 @@ public class TestAggregateProtocol { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = aClient.rowCount(TEST_TABLE, ci, scan); assertEquals(ROWSIZE, rowCount); @@ -187,7 +192,8 @@ public class TestAggregateProtocol { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[2]); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = -1; try { rowCount = aClient.rowCount(TEST_TABLE, ci, scan); @@ -211,7 +217,8 @@ public class TestAggregateProtocol { scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[5]); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = -1; try { rowCount = aClient.rowCount(TEST_TABLE, ci, scan); @@ -230,7 +237,8 @@ public class TestAggregateProtocol { Scan scan = new Scan(); scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = -1; try { rowCount = aClient.rowCount(TEST_TABLE, ci, scan); @@ -245,7 +253,8 @@ public class TestAggregateProtocol { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); scan.addFamily(TEST_FAMILY); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long rowCount = aClient.rowCount(TEST_TABLE, ci, scan); assertEquals(20, rowCount); @@ -256,7 +265,8 @@ public class TestAggregateProtocol { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); Filter f = new PrefixFilter(Bytes.toBytes("foo:bar")); scan.setFilter(f); long rowCount = aClient.rowCount(TEST_TABLE, ci, @@ -277,7 +287,8 @@ public class TestAggregateProtocol { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long maximum = aClient.max(TEST_TABLE, ci, scan); assertEquals(19, maximum); } @@ -292,7 +303,8 @@ public class TestAggregateProtocol { scan.addColumn(TEST_FAMILY, TEST_QUALIFIER); scan.setStartRow(ROWS[5]); scan.setStopRow(ROWS[15]); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long max = aClient.max(TEST_TABLE, ci, scan); assertEquals(14, max); } @@ -302,7 +314,8 @@ public class TestAggregateProtocol { AggregationClient aClient = new AggregationClient(conf); Scan scan = new Scan(); scan.addFamily(TEST_FAMILY); - final ColumnInterpreter ci = new LongColumnInterpreter(); + final ColumnInterpreter ci = + new LongColumnInterpreter(); long maximum = aClient.max(TEST_TABLE, ci, scan); 
     assertEquals(190, maximum);
   }
@@ -314,7 +327,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long max = aClient.max(TEST_TABLE, ci, scan);
     assertEquals(60, max);
   }
@@ -322,7 +336,8 @@
   @Test
   public void testMaxWithValidRangeWithNullCF() {
     AggregationClient aClient = new AggregationClient(conf);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Scan scan = new Scan();
     Long max = null;
     try {
@@ -337,7 +352,8 @@
   @Test
   public void testMaxWithInvalidRange() {
     AggregationClient aClient = new AggregationClient(conf);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Scan scan = new Scan();
     scan.setStartRow(ROWS[4]);
     scan.setStopRow(ROWS[2]);
@@ -360,7 +376,8 @@ public class TestAggregateProtocol {
     scan.setStopRow(ROWS[4]);
     try {
       AggregationClient aClient = new AggregationClient(conf);
-      final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+      final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+          new LongColumnInterpreter();
       max = aClient.max(TEST_TABLE, ci, scan);
     } catch (Exception e) {
       max = 0;
@@ -376,7 +393,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
     Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
     scan.setFilter(f);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     max = aClient.max(TEST_TABLE, ci, scan);
     assertEquals(null, max);
   }
@@ -395,7 +413,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
     scan.setStartRow(HConstants.EMPTY_START_ROW);
     scan.setStopRow(HConstants.EMPTY_END_ROW);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long min = aClient.min(TEST_TABLE, ci, scan);
     assertEquals(0l, min.longValue());
 
@@ -411,7 +430,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[15]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long min = aClient.min(TEST_TABLE, ci, scan);
     assertEquals(5, min);
   }
@@ -423,7 +443,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(HConstants.EMPTY_START_ROW);
     scan.setStopRow(HConstants.EMPTY_END_ROW);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long min = aClient.min(TEST_TABLE, ci, scan);
     assertEquals(0, min);
 
@@ -436,7 +457,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long min = aClient.min(TEST_TABLE, ci, scan);
     assertEquals(6, min);
   }
@@ -447,7 +469,8 @@ public class TestAggregateProtocol {
     Scan scan = new Scan();
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[15]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long min = null;
     try {
       min = aClient.min(TEST_TABLE, ci, scan);
@@ -465,7 +488,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[4]);
     scan.setStopRow(ROWS[2]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     try {
       min = aClient.min(TEST_TABLE, ci, scan);
     } catch (Throwable e) {
@@ -480,7 +504,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[6]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long min = null;
     try {
       min = aClient.min(TEST_TABLE, ci, scan);
@@ -496,7 +521,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
     Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
     scan.setFilter(f);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long min = null;
     min = aClient.min(TEST_TABLE, ci, scan);
     assertEquals(null, min);
@@ -513,7 +539,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long sum = aClient.sum(TEST_TABLE, ci, scan);
     assertEquals(190, sum);
 
@@ -529,7 +556,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[15]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long sum = aClient.sum(TEST_TABLE, ci, scan);
     assertEquals(95, sum);
   }
@@ -539,7 +567,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addFamily(TEST_FAMILY);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long sum = aClient.sum(TEST_TABLE, ci, scan);
     assertEquals(190 + 1900, sum);
 
@@ -552,7 +581,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     long sum = aClient.sum(TEST_TABLE, ci, scan);
     assertEquals(6 + 60, sum);
   }
@@ -563,7 +593,8 @@ public class TestAggregateProtocol {
     Scan scan = new Scan();
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long sum = null;
     try {
       sum = aClient.sum(TEST_TABLE, ci, scan);
@@ -580,7 +611,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[2]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long sum = null;
     try {
       sum = aClient.sum(TEST_TABLE, ci, scan);
@@ -596,7 +628,8 @@ public class TestAggregateProtocol {
     Scan scan = new Scan();
     scan.addFamily(TEST_FAMILY);
     scan.setFilter(f);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Long sum = null;
     sum = aClient.sum(TEST_TABLE, ci, scan);
     assertEquals(null, sum);
@@ -613,7 +646,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double avg = aClient.avg(TEST_TABLE, ci, scan);
     assertEquals(9.5, avg, 0);
 
@@ -629,7 +663,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[15]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double avg = aClient.avg(TEST_TABLE, ci, scan);
     assertEquals(9.5, avg, 0);
   }
@@ -639,7 +674,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addFamily(TEST_FAMILY);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double avg = aClient.avg(TEST_TABLE, ci, scan);
     assertEquals(104.5, avg, 0);
 
@@ -652,7 +688,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double avg = aClient.avg(TEST_TABLE, ci, scan);
     assertEquals(6 + 60, avg, 0);
   }
@@ -661,7 +698,8 @@ public class TestAggregateProtocol {
   public void testAvgWithValidRangeWithNullCF() {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Double avg = null;
     try {
       avg = aClient.avg(TEST_TABLE, ci, scan);
@@ -678,7 +716,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[1]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Double avg = null;
     try {
       avg = aClient.avg(TEST_TABLE, ci, scan);
@@ -694,7 +733,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
     Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
     scan.setFilter(f);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Double avg = null;
     avg = aClient.avg(TEST_TABLE, ci, scan);
     assertEquals(Double.NaN, avg, 0);
@@ -711,7 +751,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double std = aClient.std(TEST_TABLE, ci, scan);
     assertEquals(5.766, std, 0.05d);
 
@@ -727,7 +768,8 @@ public class TestAggregateProtocol {
     scan.addColumn(TEST_FAMILY,TEST_QUALIFIER);
     scan.setStartRow(ROWS[5]);
     scan.setStopRow(ROWS[15]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double std = aClient.std(TEST_TABLE, ci, scan);
     assertEquals(2.87, std, 0.05d);
   }
@@ -737,7 +779,8 @@ public class TestAggregateProtocol {
     AggregationClient aClient = new AggregationClient(conf);
     Scan scan = new Scan();
     scan.addFamily(TEST_FAMILY);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double std = aClient.std(TEST_TABLE, ci, scan);
     assertEquals(63.42, std, 0.05d);
 
@@ -750,7 +793,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[7]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     double std = aClient.std(TEST_TABLE, ci, scan);
     assertEquals(0, std, 0);
   }
@@ -761,7 +805,8 @@ public class TestAggregateProtocol {
     Scan scan = new Scan();
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[17]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Double std = null;
     try {
       std = aClient.std(TEST_TABLE, ci, scan);
@@ -778,7 +823,8 @@ public class TestAggregateProtocol {
     scan.addFamily(TEST_FAMILY);
     scan.setStartRow(ROWS[6]);
     scan.setStopRow(ROWS[1]);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
    Double std = null;
     try {
       std = aClient.std(TEST_TABLE, ci, scan);
@@ -794,7 +840,8 @@ public class TestAggregateProtocol {
     Scan scan = new Scan();
     scan.addFamily(TEST_FAMILY);
     scan.setFilter(f);
-    final ColumnInterpreter<Long, Long> ci = new LongColumnInterpreter();
+    final ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
+        new LongColumnInterpreter();
     Double std = null;
     std = aClient.std(TEST_TABLE, ci, scan);
     assertEquals(Double.NaN, std, 0);
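The test churn above is mechanical but it ties the two halves of the patch together: every ColumnInterpreter<Long, Long> becomes ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg>, and because the subclass declaration now pins all five type arguments, code holding only an interpreter reference can recover the concrete PB message classes and parse wire bytes with them via the new ProtobufUtil.getParsedGenericInstance. A hedged sketch of that interaction (the demo wrapper class and the LongMsg accessors setLongMsg/getLongMsg are assumed from the generated HBaseProtos code, not shown in this excerpt):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.LongMsg;

public class GenericInstanceDemo {
  public static void main(String[] args) throws Exception {
    // LongColumnInterpreter's extends clause fixes all five type arguments,
    // so the PB message classes are recoverable from the instance alone.
    ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> ci =
        new LongColumnInterpreter();

    // Simulate a serialized response (accessor names assumed from the
    // generated code for LongMsg's long_msg field).
    ByteString wire = LongMsg.newBuilder().setLongMsg(42L).build().toByteString();

    // Position 4 = the fifth type argument, i.e. the response message type.
    LongMsg parsed = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, wire);
    System.out.println(parsed.getLongMsg()); // 42
  }
}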