From 54a88928ed269d0e6627ade8818361a6b6f44755 Mon Sep 17 00:00:00 2001 From: QilinCao Date: Mon, 30 Oct 2017 16:52:07 +0800 Subject: [PATCH] HBASE-19103 Add BigDecimalComparator for filter --- .../hadoop/hbase/filter/BigDecimalComparator.java | 115 +++++ .../hbase/protobuf/generated/ComparatorProtos.java | 570 ++++++++++++++++++++- hbase-protocol/src/main/protobuf/Comparator.proto | 4 + .../hbase/filter/TestBigDecimalComparator.java | 92 ++++ .../hbase/filter/TestComparatorSerialization.java | 9 + .../hadoop/hbase/regionserver/TestHRegion.java | 43 ++ 6 files changed, 831 insertions(+), 2 deletions(-) create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java new file mode 100644 index 0000000..cbe8d5e --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java @@ -0,0 +1,115 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.filter; + +import java.math.BigDecimal; + +import com.google.protobuf.InvalidProtocolBufferException; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; +import org.apache.hadoop.hbase.util.Bytes; + +/** + * A BigDecimal comparator which numerical compares against the specified byte array + */ +@InterfaceAudience.Public +@InterfaceStability.Stable +public class BigDecimalComparator extends ByteArrayComparable { + private BigDecimal bigDecimal; + private final int hashCode; + + public BigDecimalComparator(BigDecimal value) { + super(Bytes.toBytes(value)); + this.bigDecimal = value; + this.hashCode = value.hashCode(); + } + + @Override + public int compareTo(byte[] value, int offset, int length) { + BigDecimal that = Bytes.toBigDecimal(value, offset, length); + return this.bigDecimal.compareTo(that); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || !(obj instanceof BigDecimalComparator)) { + return false; + } + if (this == obj) { + return true; + } + BigDecimalComparator bdc = (BigDecimalComparator) obj; + return bdc.hashCode() == hashCode && this.toString().equals(bdc.toString()); + } + + @Override + public int hashCode() { + return hashCode; + } + + @Override + public String toString() { + return this.bigDecimal.toString(); + } + + /** + * @return The comparator serialized using pb + */ + @Override + public byte[] toByteArray() { + ComparatorProtos.BigDecimalComparator.Builder builder = + ComparatorProtos.BigDecimalComparator.newBuilder(); + builder.setComparable(super.convert()); + return builder.build().toByteArray(); + } + + /** + * @param pbBytes A pb serialized {@link BigDecimalComparator} instance + * @return An instance of {@link BigDecimalComparator} made from bytes + * 
@throws org.apache.hadoop.hbase.exceptions.DeserializationException + * @see #toByteArray + */ + public static BigDecimalComparator parseFrom(final byte[] pbBytes) + throws DeserializationException { + ComparatorProtos.BigDecimalComparator proto; + try { + proto = ComparatorProtos.BigDecimalComparator.parseFrom(pbBytes); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + return new BigDecimalComparator(Bytes.toBigDecimal(proto.getComparable().getValue() + .toByteArray())); + } + + /** + * @param other + * @return true if and only if the fields of the comparator that are serialized are equal to the + * corresponding fields in other. Used for testing. + */ + boolean areSerializedFieldsEqual(BigDecimalComparator other) { + if (other == this) { + return true; + } + return super.areSerializedFieldsEqual(other); + } +} diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java index ea42a81..d2fbd50 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java @@ -5273,6 +5273,559 @@ public final class ComparatorProtos { // @@protoc_insertion_point(class_scope:SubstringComparator) } + public interface BigDecimalComparatorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .ByteArrayComparable comparable = 1; + /** + * required .ByteArrayComparable comparable = 1; + */ + boolean hasComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable(); + /** + * required .ByteArrayComparable comparable = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder 
getComparableOrBuilder(); + } + /** + * Protobuf type {@code BigDecimalComparator} + */ + public static final class BigDecimalComparator extends + com.google.protobuf.GeneratedMessage + implements BigDecimalComparatorOrBuilder { + // Use BigDecimalComparator.newBuilder() to construct. + private BigDecimalComparator(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private BigDecimalComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final BigDecimalComparator defaultInstance; + public static BigDecimalComparator getDefaultInstance() { + return defaultInstance; + } + + public BigDecimalComparator getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BigDecimalComparator( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = comparable_.toBuilder(); + } + comparable_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.PARSER, extensionRegistry); + if 
(subBuilder != null) { + subBuilder.mergeFrom(comparable_); + comparable_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BigDecimalComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BigDecimalComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BigDecimalComparator parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BigDecimalComparator(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .ByteArrayComparable comparable = 1; + public static final int COMPARABLE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_; + /** + * required .ByteArrayComparable comparable = 1; + */ + public boolean hasComparable() { + 
return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + return comparable_; + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + return comparable_; + } + + private void initFields() { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasComparable()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, comparable_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, comparable_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator other = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator) obj; + + boolean result = true; + result = result && (hasComparable() == other.hasComparable()); + if (hasComparable()) { + result = result && getComparable() + .equals(other.getComparable()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasComparable()) { + hash = (37 * hash) + COMPARABLE_FIELD_NUMBER; + hash = (53 * hash) + getComparable().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator prototype) { + return newBuilder().mergeFrom(prototype); + } + public 
Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code BigDecimalComparator} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparatorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BigDecimalComparator_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BigDecimalComparator_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.class, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getComparableFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return 
this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.internal_static_BigDecimalComparator_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator build() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator result = new org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (comparableBuilder_ == null) { + result.comparable_ = comparable_; + } else { + result.comparable_ = comparableBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator.getDefaultInstance()) return this; + if (other.hasComparable()) { + mergeComparable(other.getComparable()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasComparable()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.BigDecimalComparator) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .ByteArrayComparable comparable = 1; + private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> comparableBuilder_; + /** + * required .ByteArrayComparable comparable = 1; + */ + public boolean hasComparable() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable getComparable() { + if (comparableBuilder_ == 
null) { + return comparable_; + } else { + return comparableBuilder_.getMessage(); + } + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public Builder setComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + comparable_ = value; + onChanged(); + } else { + comparableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public Builder setComparable( + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder builderForValue) { + if (comparableBuilder_ == null) { + comparable_ = builderForValue.build(); + onChanged(); + } else { + comparableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public Builder mergeComparable(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable value) { + if (comparableBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + comparable_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance()) { + comparable_ = + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.newBuilder(comparable_).mergeFrom(value).buildPartial(); + } else { + comparable_ = value; + } + onChanged(); + } else { + comparableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public Builder clearComparable() { + if (comparableBuilder_ == null) { + comparable_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.getDefaultInstance(); + onChanged(); + } else { + comparableBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } 
+ /** + * required .ByteArrayComparable comparable = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder getComparableBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getComparableFieldBuilder().getBuilder(); + } + /** + * required .ByteArrayComparable comparable = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder getComparableOrBuilder() { + if (comparableBuilder_ != null) { + return comparableBuilder_.getMessageOrBuilder(); + } else { + return comparable_; + } + } + /** + * required .ByteArrayComparable comparable = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder> + getComparableFieldBuilder() { + if (comparableBuilder_ == null) { + comparableBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparable.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ByteArrayComparableOrBuilder>( + comparable_, + getParentForChildren(), + isClean()); + comparable_ = null; + } + return comparableBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BigDecimalComparator) + } + + static { + defaultInstance = new BigDecimalComparator(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BigDecimalComparator) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_Comparator_descriptor; private static @@ -5318,6 +5871,11 @@ public final class ComparatorProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_SubstringComparator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BigDecimalComparator_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BigDecimalComparator_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -5342,8 +5900,10 @@ public final class ComparatorProtos { "ingComparator\022\017\n\007pattern\030\001 \002(\t\022\025\n\rpatter" + "n_flags\030\002 \002(\005\022\017\n\007charset\030\003 \002(\t\022\016\n\006engine" + "\030\004 \001(\t\"%\n\023SubstringComparator\022\016\n\006substr\030" + - "\001 \002(\tBF\n*org.apache.hadoop.hbase.protobu" + - "f.generatedB\020ComparatorProtosH\001\210\001\001\240\001\001" + "\001 \002(\t\"@\n\024BigDecimalComparator\022(\n\ncompara" + + "ble\030\001 \002(\0132\024.ByteArrayComparableBF\n*org.a" + + "pache.hadoop.hbase.protobuf.generatedB\020C" + + "omparatorProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -5404,6 +5964,12 @@ public final class ComparatorProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SubstringComparator_descriptor, new java.lang.String[] { "Substr", }); + internal_static_BigDecimalComparator_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_BigDecimalComparator_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BigDecimalComparator_descriptor, + new java.lang.String[] { "Comparable", }); return null; } }; diff --git a/hbase-protocol/src/main/protobuf/Comparator.proto b/hbase-protocol/src/main/protobuf/Comparator.proto index a44ec84..c4178ee 100644 --- a/hbase-protocol/src/main/protobuf/Comparator.proto +++ b/hbase-protocol/src/main/protobuf/Comparator.proto @@ -71,3 
+71,7 @@ message RegexStringComparator { message SubstringComparator { required string substr = 1; } + +message BigDecimalComparator { + required ByteArrayComparable comparable = 1; +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java new file mode 100644 index 0000000..b3ed560 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hadoop.hbase.filter; + +import java.math.BigDecimal; + +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Assert; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestBigDecimalComparator { + + @Test + public void testEqualsValue() { + // given + BigDecimal bd1 = new BigDecimal(Double.MAX_VALUE); + BigDecimal bd2 = new BigDecimal(Double.MIN_VALUE); + byte[] value1 = Bytes.toBytes(bd1); + byte[] value2 = Bytes.toBytes(bd2); + BigDecimalComparator comparator1 = new BigDecimalComparator(bd1); + BigDecimalComparator comparator2 = new BigDecimalComparator(bd2); + + // when + int comp1 = comparator1.compareTo(value1); + int comp2 = comparator2.compareTo(value2); + + // then + Assert.assertEquals(0, comp1); + Assert.assertEquals(0, comp2); + } + + @Test + public void testGreaterThanValue() { + // given + byte[] val1 = Bytes.toBytes(new BigDecimal("1000000000000000000000000000000.9999999999999999")); + byte[] val2 = Bytes.toBytes(new BigDecimal(0)); + byte[] val3 = Bytes.toBytes(new BigDecimal(Double.MIN_VALUE)); + BigDecimal bd = new BigDecimal(Double.MAX_VALUE); + BigDecimalComparator comparator = new BigDecimalComparator(bd); + + // when + int comp1 = comparator.compareTo(val1); + int comp2 = comparator.compareTo(val2); + int comp3 = comparator.compareTo(val3); + + // then + Assert.assertEquals(1, comp1); + Assert.assertEquals(1, comp2); + Assert.assertEquals(1, comp3); + } + + @Test + public void testLessThanValue() { + // given + byte[] val1 = Bytes.toBytes(new BigDecimal("-1000000000000000000000000000000")); + byte[] val2 = Bytes.toBytes(new BigDecimal(0)); + byte[] val3 = Bytes.toBytes(new BigDecimal(1)); + BigDecimal bd = new BigDecimal("-1000000000000000000000000000000.0000000000000001"); + BigDecimalComparator comparator = new BigDecimalComparator(bd); + + // when + int comp1 = 
comparator.compareTo(val1); + int comp2 = comparator.compareTo(val2); + int comp3 = comparator.compareTo(val3); + + // then + Assert.assertEquals(-1, comp1); + Assert.assertEquals(-1, comp2); + Assert.assertEquals(-1, comp3); + } + +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java index 693f041..5e149f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java @@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.filter; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import java.math.BigDecimal; import java.util.regex.Pattern; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -85,4 +86,12 @@ public class TestComparatorSerialization { ProtobufUtil.toComparator(ProtobufUtil.toComparator(substringComparator)))); } + @Test + public void testBigDecimalComparator() throws Exception { + BigDecimal bigDecimal = new BigDecimal(Double.MIN_VALUE); + BigDecimalComparator bigDecimalComparator = new BigDecimalComparator(bigDecimal); + assertTrue(bigDecimalComparator.areSerializedFieldsEqual(ProtobufUtil.toComparator(ProtobufUtil + .toComparator(bigDecimalComparator)))); + } + } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 4d31374..a9073f0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -45,6 +45,7 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.io.InterruptedIOException; +import java.math.BigDecimal; import 
java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; @@ -112,6 +113,7 @@ import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; +import org.apache.hadoop.hbase.filter.BigDecimalComparator; import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; @@ -1743,6 +1745,8 @@ public class TestHRegion { byte[] qf1 = Bytes.toBytes("qualifier"); byte[] val1 = Bytes.toBytes("value1"); byte[] val2 = Bytes.toBytes("value2"); + BigDecimal bd1 = new BigDecimal(Double.MAX_VALUE); + BigDecimal bd2 = new BigDecimal(Double.MIN_VALUE); // Setting up region String method = this.getName(); @@ -1764,6 +1768,25 @@ public class TestHRegion { res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2), put, true); assertEquals(false, res); + + // Putting data in key + put = new Put(row1); + put.add(fam1, qf1, Bytes.toBytes(bd1)); + region.put(put); + + // checkAndPut with wrong value + res = + region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BigDecimalComparator(bd2), + put, true); + assertEquals(false, res); + + // checkAndDelete with wrong value + delete = new Delete(row1); + delete.deleteFamily(fam1); + res = + region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BigDecimalComparator(bd2), + delete, true); + assertEquals(false, res); } finally { HRegion.closeHRegion(this.region); this.region = null; @@ -1776,6 +1799,7 @@ public class TestHRegion { byte[] fam1 = Bytes.toBytes("fam1"); byte[] qf1 = Bytes.toBytes("qualifier"); byte[] val1 = Bytes.toBytes("value1"); + BigDecimal bd1 = new BigDecimal(Double.MIN_VALUE); // Setting up region String method = this.getName(); @@ -1797,6 +1821,25 @@ public class TestHRegion { res =
region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1), delete, true); assertEquals(true, res); + + // Putting data in key + put = new Put(row1); + put.add(fam1, qf1, Bytes.toBytes(bd1)); + region.put(put); + + // checkAndPut with correct value + res = + region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BigDecimalComparator(bd1), + put, true); + assertEquals(true, res); + + // checkAndDelete with correct value + delete = new Delete(row1); + delete.deleteColumn(fam1, qf1); + res = + region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BigDecimalComparator(bd1), + delete, true); + assertEquals(true, res); } finally { HRegion.closeHRegion(this.region); this.region = null; -- 1.9.1