diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index b22f824..d8d75ab 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -2329,13 +2329,15 @@ public class KeyValue implements Cell, HeapSize { * @param kv * @param out * @throws IOException + * @return Bytes written to the output stream */ - public static void write(final KeyValue kv, final DataOutput out) throws IOException { + public static long write(final KeyValue kv, final DataOutput out) throws IOException { // This is how the old Writables write used to serialize KVs. Need to figure way to make it work for all // implementations. int length = kv.getLength(); out.writeInt(length); out.write(kv.getBuffer(), kv.getOffset(), length); + return length + Bytes.SIZEOF_INT; } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueDecoder.java new file mode 100644 index 0000000..1591c41 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueDecoder.java @@ -0,0 +1,7 @@ +package org.apache.hadoop.hbase.io.encoding; + +import org.apache.hbase.cell.Decoder; + +public class OldSchoolKeyValueDecoder implements Decoder { + +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueEncoder.java new file mode 100644 index 0000000..8778e01 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/OldSchoolKeyValueEncoder.java @@ -0,0 +1,54 @@ +package org.apache.hadoop.hbase.io.encoding; + +import java.io.DataOutputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import 
org.apache.hbase.Cell; +import org.apache.hbase.cell.Encoder; + +/** + * Encodes by casting Cell to KeyValue and writing out the backing array with a length prefix. + * This is how KVs were serialized in Puts, Deletes and Results pre-0.96. It's what would + * happen if you called the Writable#write KeyValue implementation. This encoder will fail + * if the passed Cell is not an old school pre-0.96 KeyValue. + */ +public class OldSchoolKeyValueEncoder implements Encoder { + // Need to be able to write java types such as int and long so need DataOutput. + // Want to stream too so DataOutputStream. + private final DataOutputStream out; + long size = 0; + long count = 0; + + private final Encoder.Context context = new Encoder.Context() { + @Override + public long size() { + return size; + } + + @Override + public long count() { + return count; + } + }; + + public Encoder.Context getContext() { + return context; + } + + public OldSchoolKeyValueEncoder(final DataOutputStream out) { + this.out = out; + } + + @Override + public void write(Cell cell) throws IOException { + // This is crass and will not work when KV changes + this.size += KeyValue.write((KeyValue)cell, this.out); + this.count++; + } + + @Override + public void flush() throws IOException { + this.out.flush(); + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/cell/Decoder.java b/hbase-common/src/main/java/org/apache/hbase/cell/Decoder.java new file mode 100644 index 0000000..1c5de63 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/cell/Decoder.java @@ -0,0 +1,7 @@ +package org.apache.hbase.cell; + + +public interface Decoder { + public interface Context { + } +} diff --git a/hbase-common/src/main/java/org/apache/hbase/cell/Encoder.java b/hbase-common/src/main/java/org/apache/hbase/cell/Encoder.java new file mode 100644 index 0000000..e5b98e5 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/cell/Encoder.java @@ -0,0 +1,42 @@ +package 
org.apache.hbase.cell; + +import java.io.IOException; + +import org.apache.hbase.Cell; + +/** + * Cell encoder. + * TODO: Implement CellOutputStream when it has IOException added to its methods (we + * repeat flush and write below from COS in the meantime). + */ +public interface Encoder { + /** + * Encoder context + */ + public interface Context { + public long size(); + public long count(); + } + + /** + * @return Encoder context + */ + Encoder.Context getContext(); + + /** + * Implementation must copy the entire state of the Cell. If the appended Cell is modified + * immediately after the append method returns, the modifications must have absolutely no effect + * on the copy of the Cell that was added to the appender. For example, calling someList.add(cell) + * is not correct. + * @throws IOException + */ + void write(Cell cell) throws IOException; + + /** + * Let the implementation decide what to do. Usually means writing accumulated data into a byte[] + * that can then be read from the implementation to be sent to disk, put in the block cache, or + * sent over the network. 
+ * @throws IOException + */ + void flush() throws IOException; +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index 925801a..590baf5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -16,14 +16,10 @@ */ package org.apache.hadoop.hbase.io.encoding; -import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; -import java.io.FilterOutputStream; import java.io.IOException; -import java.lang.reflect.Field; import java.nio.ByteBuffer; -import java.util.Arrays; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.KeyValue;