diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java
new file mode 100644
index 0000000..91e9524
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.CodedOutputStream;
+import com.google.protobuf.Message;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+/**
+ * A base class for {@link DataType} implementations backed by protobuf. See
+ * {@code PBCell} in the {@code hbase-examples} module.
+ */
+public abstract class PBType<T extends Message> implements DataType<T> {
+  @Override
+  public boolean isOrderPreserving() {
+    return false;
+  }
+
+  @Override
+  public Order getOrder() {
+    return null;
+  }
+
+  @Override
+  public boolean isNullable() {
+    return false;
+  }
+
+  @Override
+  public boolean isSkippable() {
+    return true;
+  }
+
+  @Override
+  public int encodedLength(T val) {
+    return val.getSerializedSize();
+  }
+
+  /**
+   * Create a {@link CodedInputStream} from a {@link PositionedByteRange}. Be sure to update
+   * {@code src}'s position after consuming from the stream.
+   * <p>
+   * For example:
+   * <pre>
+   * Foo.Builder builder = ...
+   * CodedInputStream is = inputStreamFromByteRange(src);
+   * Foo ret = builder.mergeFrom(is).build();
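+   * // mergeFrom() does not advance src on its own; update the position explicitly: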
+   * src.setPosition(src.getPosition() + is.getTotalBytesRead());
+   * </pre>
+   */
+  public static CodedInputStream inputStreamFromByteRange(PositionedByteRange src) {
+    return CodedInputStream.newInstance(
+        src.getBytes(),
+        src.getOffset() + src.getPosition(),
+        src.getRemaining());
+  }
+
+  /**
+   * Create a {@link CodedOutputStream} from a {@link PositionedByteRange}. Be sure to update
+   * {@code dst}'s position after writing to the stream.
+   * <p>
+   * For example:
+   * <pre>
+   * CodedOutputStream os = outputStreamFromByteRange(dst);
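+   * // writeTo() does not advance dst; derive the bytes written from spaceLeft():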
+   * int before = os.spaceLeft(), after, written;
+   * val.writeTo(os);
+   * after = os.spaceLeft();
+   * written = before - after;
+   * dst.setPosition(dst.getPosition() + written);
+   * </pre>
+   */
+  public static CodedOutputStream outputStreamFromByteRange(PositionedByteRange dst) {
+    return CodedOutputStream.newInstance(
+        dst.getBytes(),
+        dst.getOffset() + dst.getPosition(),
+        dst.getRemaining()
+    );
+  }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index 20cf767..d8ee65e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -56,6 +56,8 @@ import com.google.common.annotations.VisibleForTesting;
  * <tr><td>NaN</td><td>0x25</td></tr>
  * <tr><td>fixed-length 32-bit integer</td><td>0x27, I</td></tr>
  * <tr><td>fixed-length 64-bit integer</td><td>0x28, I</td></tr>
+ * <tr><td>fixed-length 8-bit integer</td><td>0x29</td></tr>
+ * <tr><td>fixed-length 16-bit integer</td><td>0x2a</td></tr>
  * <tr><td>fixed-length 32-bit float</td><td>0x30, F</td></tr>
  * <tr><td>fixed-length 64-bit float</td><td>0x31, F</td></tr>
  * <tr><td>TEXT</td><td>0x33, T</td></tr>
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
new file mode 100644
index 0000000..96ecf28
--- /dev/null
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.CodedOutputStream;
+import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+import java.io.IOException;
+
+/**
+ * An example of using protobuf objects with the {@link DataType} API.
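+ * <p>
+ * For example, a round trip through this codec, given an existing {@code cell}, looks roughly
+ * like the following (see {@code TestPBCell} for a runnable version):
+ * <pre>
+ * PBCell codec = new PBCell();
+ * CellProtos.Cell proto = ProtobufUtil.toCell(cell);
+ * PositionedByteRange buff = new SimplePositionedByteRange(proto.getSerializedSize());
+ * codec.encode(buff, proto);
+ * buff.setPosition(0);
+ * CellProtos.Cell decoded = codec.decode(buff);
+ * </pre>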
+ */
+public class PBCell extends PBType<CellProtos.Cell> {
+  @Override
+  public Class<CellProtos.Cell> encodedClass() {
+    return CellProtos.Cell.class;
+  }
+
+  @Override
+  public int skip(PositionedByteRange src) {
+    CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
+    CodedInputStream is = inputStreamFromByteRange(src);
+    try {
+      builder.mergeFrom(is);
+      int consumed = is.getTotalBytesRead();
+      src.setPosition(src.getPosition() + consumed);
+      return consumed;
+    } catch (IOException e) {
+      throw new RuntimeException("Error while skipping type.", e);
+    }
+  }
+
+  @Override
+  public CellProtos.Cell decode(PositionedByteRange src) {
+    CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
+    CodedInputStream is = inputStreamFromByteRange(src);
+    try {
+      CellProtos.Cell ret = builder.mergeFrom(is).build();
+      src.setPosition(src.getPosition() + is.getTotalBytesRead());
+      return ret;
+    } catch (IOException e) {
+      throw new RuntimeException("Error while decoding type.", e);
+    }
+  }
+
+  @Override
+  public int encode(PositionedByteRange dst, CellProtos.Cell val) {
+    CodedOutputStream os = outputStreamFromByteRange(dst);
+    try {
+      int before = os.spaceLeft(), after, written;
+      val.writeTo(os);
+      after = os.spaceLeft();
+      written = before - after;
+      dst.setPosition(dst.getPosition() + written);
+      return written;
+    } catch (IOException e) {
+      throw new RuntimeException("Error while encoding type.", e);
+    }
+  }
+}
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
new file mode 100644
index 0000000..952a319
--- /dev/null
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+import org.apache.hadoop.hbase.util.SimplePositionedByteRange;
+import org.junit.Test;
+
+public class TestPBCell {
+
+  private static final PBCell CODEC = new PBCell();
+
+  /**
+   * Basic test to verify that the utility methods in {@link PBType} and delegation to protobuf
+   * work.
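+   * Converts a {@link KeyValue} to its protobuf representation, encodes it into a
+   * {@link SimplePositionedByteRange} with the codec, decodes it again, and checks that the
+   * buffer position advanced by the encoded length and that the decoded cell matches the
+   * original.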
+   */
+  @Test
+  public void testRoundTrip() {
+    final Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
+        Bytes.toBytes("qual"), Bytes.toBytes("val"));
+    CellProtos.Cell c = ProtobufUtil.toCell(cell), decoded;
+    PositionedByteRange pbr = new SimplePositionedByteRange(c.getSerializedSize());
+    pbr.setPosition(0);
+    int encodedLength = CODEC.encode(pbr, c);
+    pbr.setPosition(0);
+    decoded = CODEC.decode(pbr);
+    assertEquals(encodedLength, pbr.getPosition());
+    assertTrue(CellComparator.equals(cell, ProtobufUtil.toCell(decoded)));
+  }
+}