diff --git a/hbase-common/src/main/java/org/apache/hbase/codec/CellDecoder.java b/hbase-common/src/main/java/org/apache/hbase/codec/CellDecoder.java new file mode 100644 index 0000000..38a4e03 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/codec/CellDecoder.java @@ -0,0 +1,71 @@ +package org.apache.hbase.codec; + +import java.io.DataInputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.io.WritableUtils; +import org.apache.hbase.Cell; +import org.apache.hbase.io.CellScanner; + +/** + * @see CellEncoder + */ +public class CellDecoder implements CellScanner { + private final DataInputStream in; + // If true, this decoder is finished decoding. + private boolean finished = false; + private Cell current = null; + + public CellDecoder(final DataInputStream in) { + this.in = in; + } + + @Override + public boolean next() throws IOException { + if (this.finished) return false; + int length; + try { + length = WritableUtils.readVInt(this.in); + } catch (IOException e) { + throw new CodecException(e); + } + if (length == CellEncoder.END_OF_CELLS) { + this.finished = true; + } else { + // Presume we are on to a cell... read it in. + try { + byte [] row = new byte [length]; + this.in.readFully(row, 0, length); + byte [] family = readByteArray(); + byte [] qualifier = readByteArray(); + long timestamp = this.in.readLong(); + byte type = this.in.readByte(); + byte [] value = readByteArray(); + // I need a Cell Factory here. Using KeyValue for now. TODO. + this.current = new KeyValue(row, family, qualifier, timestamp, + KeyValue.Type.codeToType(type), value); + } catch (IOException e) { + throw new CodecException(e); + } + } + return !this.finished; + } + + /** + * @return Byte array read from the stream. + * @throws IOException + */ + private byte [] readByteArray() throws IOException { + // TODO: A method like this belongs in CellTool? 
+ int length = WritableUtils.readVInt(this.in); + byte [] bytes = new byte [length]; + this.in.readFully(bytes, 0, length); + return bytes; + } + + @Override + public Cell getCurrent() { + return this.current; + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/codec/CellEncoder.java b/hbase-common/src/main/java/org/apache/hbase/codec/CellEncoder.java new file mode 100644 index 0000000..476d753 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/codec/CellEncoder.java @@ -0,0 +1,79 @@ +package org.apache.hbase.codec; + +import java.io.DataOutputStream; +import java.io.IOException; + +import org.apache.hadoop.io.WritableUtils; +import org.apache.hbase.Cell; +import org.apache.hbase.io.CellOutputStream; + +/** + * Cell encoder that just writes out all the individual elements of a Cell. + * Does not write the mvcc stamp. Use a different encoder if you want that + * in the stream. + */ +public class CellEncoder implements CellOutputStream { + // TODO: Replace WritableUtils from hadoop vint w/ our own or something faster? + // TODO: I need a CellFactory to make Cells with. Using KeyValue for now. + + // We write out an '0' int as marker that there are no more kvs when you call flush. + static final int END_OF_CELLS = 0; + // Need to be able to write java types such as int and long so need DataOutput. + // Want to stream too so DataOutputStream. + private final DataOutputStream out; + // This encoder is 'done' once flush has been called because on flush we + // write out the END_OF_CELLS marker. 
+ private boolean finish = false; + + public CellEncoder(final DataOutputStream out) { + this.out = out; + } + + @Override + public void write(Cell cell) throws IOException { + if (this.finish) throw new CodecException("Finished"); + // This is crass and will not work when KV changes + try { + // Row + write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); + // Column family + write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()); + // Qualifier + write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()); + // Version + this.out.writeLong(cell.getTimestamp()); + // Type + this.out.writeByte(cell.getTypeByte()); + // Value + write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); + } catch (IOException e) { + throw new CodecException(e); + } + } + + /** + * Write vint length followed by array bytes. + * @param bytes + * @param offset + * @param length + * @throws IOException + */ + private void write(final byte [] bytes, final int offset, final int length) + throws IOException { + WritableUtils.writeVInt(this.out, length); + this.out.write(bytes, offset, length); + } + + @Override + public void flush() throws IOException { + if (this.finish) return; + this.finish = true; + try { + // Write out an vint whose value is zero as end of stream. 
+ WritableUtils.writeVInt(this.out, END_OF_CELLS); + this.out.flush(); + } catch (IOException e) { + throw new CodecException(e); + } + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java b/hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java new file mode 100644 index 0000000..d3f5bff --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java @@ -0,0 +1,22 @@ +package org.apache.hbase.codec; + +import java.io.IOException; + +public class CodecException extends IOException { + private static final long serialVersionUID = -2850095011686914405L; + + public CodecException() { + } + + public CodecException(String message) { + super(message); + } + + public CodecException(Throwable t) { + super(t); + } + + public CodecException(String message, Throwable t) { + super(message, t); + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueDecoder.java b/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueDecoder.java new file mode 100644 index 0000000..f0eb1e4 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueDecoder.java @@ -0,0 +1,48 @@ +package org.apache.hbase.codec; + +import java.io.DataInputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hbase.Cell; +import org.apache.hbase.io.CellScanner; + +/** + * @see OldSchoolKeyValueEncoder + */ +public class OldSchoolKeyValueDecoder implements CellScanner { + private final DataInputStream in; + private Cell current = null; + // If true, this decoder is finished decoding. 
+ private boolean finished = false; + + public OldSchoolKeyValueDecoder(final DataInputStream in) { + this.in = in; + } + + @Override + public boolean next() throws CodecException { + if (this.finished) return false; + int length; + try { + length = this.in.readInt(); + } catch (IOException e) { + throw new CodecException(e); + } + if (length == OldSchoolKeyValueEncoder.END_OF_KEYVALUES) { + this.finished = true; + } else { + try { + this.current = KeyValue.create(length, this.in); + } catch (IOException e) { + throw new CodecException(e); + } + } + return !this.finished; + } + + @Override + public Cell getCurrent() { + return this.current; + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueEncoder.java b/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueEncoder.java new file mode 100644 index 0000000..ebb2483 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hbase/codec/OldSchoolKeyValueEncoder.java @@ -0,0 +1,70 @@ +package org.apache.hbase.codec; + +import java.io.DataOutputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hbase.Cell; +import org.apache.hbase.io.CellOutputStream; + +/** + * Encodes by casting Cell to KeyValue and writing out the backing array with a length prefix. + * This is how KVs were serialized in Puts, Deletes and Results pre-0.96. Its what would + * happen if you called the Writable#write KeyValue implementation. This encoder will fail + * if the passed Cell is not an old-school pre-0.96 KeyValue. Does not copy bytes writing. + * It just writes them direct to the passed stream. When {@link #flush} is called, + * we write out an End-Of-KeyValues marker to the stream and subsequent {@link #write(Cell)} + * calls will throw an exception. + * + *

If you wrote two KeyValues to this encoder, it would look like this in the stream: + *

+ * length-of-KeyValue1 // A java int with the length of KeyValue1 backing array
+ * KeyValue1 backing array filled with a KeyValue serialized in its particular format
+ * length-of-KeyValue2
+ * KeyValue2 backing array
+ * length-of-zero // A java int whose value is 0; marks end of the encoded section
+ * 
+ * @see OldSchoolKeyValueDecoder + */ +public class OldSchoolKeyValueEncoder implements CellOutputStream { + // We write out an '0' int as marker that there are no more kvs when you call flush. + static final int END_OF_KEYVALUES = 0; + // Need to be able to write java types such as int and long so need DataOutput. + // Want to stream too so DataOutputStream. + private final DataOutputStream out; + // This encoder is 'done' once flush has been called because on flush we + // write out the END_OF_KEYVALUES marker. + private boolean flush = false; + + public OldSchoolKeyValueEncoder(final DataOutputStream out) { + this.out = out; + } + + @Override + public void write(Cell cell) throws IOException { + if (this.flush) throw new CodecException("Finished"); + // This is crass and will not work when KV changes + try { + KeyValue.write((KeyValue)cell, this.out); + } catch (IOException e) { + throw new CodecException(e); + } + } + + /** + * Calling flush 'finishes' this encoder. Subsequent calls + * to {@link #write(Cell)} will throw exception. + */ + @Override + public void flush() throws IOException { + if (this.flush) return; + this.flush = true; + // Write out an int whose value is zero as end of stream. + try { + this.out.writeInt(END_OF_KEYVALUES); + this.out.flush(); + } catch (IOException e) { + throw new CodecException(e); + } + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/CellScanner.java b/hbase-common/src/main/java/org/apache/hbase/io/CellScanner.java index 4196aac..9455037 100644 --- a/hbase-common/src/main/java/org/apache/hbase/io/CellScanner.java +++ b/hbase-common/src/main/java/org/apache/hbase/io/CellScanner.java @@ -57,7 +57,7 @@ import org.apache.hbase.Cell; @InterfaceStability.Unstable public interface CellScanner { /** - * @return the current Cell which may be mutable. Will be null before the first read + * @return the current Cell which may be mutable. Will be null before the first next * has happened. 
*/ Cell getCurrent(); @@ -67,5 +67,5 @@ public interface CellScanner { * @return true if the next cell is found and getCurrentCell() will return a valid Cell * @throws IOException */ - boolean read() throws IOException; + boolean next() throws IOException; } \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/CellScannerPosition.java b/hbase-common/src/main/java/org/apache/hbase/io/CellScannerPosition.java deleted file mode 100644 index e10c4a9..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/CellScannerPosition.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hbase.io; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * An indicator of the state of the scanner after an operation such as nextCell() or positionAt(..). - * For example: - * - */ -@InterfaceAudience.Public -@InterfaceStability.Evolving -public enum CellScannerPosition { - - /** - * getCurrentCell() will NOT return a valid cell. Calling nextCell() will advance to the first - * cell. 
- */ - BEFORE_FIRST, - - /** - * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..), - * rather it is the nearest cell before the requested cell. - */ - BEFORE, - - /** - * getCurrentCell() will return a valid cell, and it is exactly the cell that was requested by - * positionAt(..). - */ - AT, - - /** - * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..), - * rather it is the nearest cell after the requested cell. - */ - AFTER, - - /** - * getCurrentCell() will NOT return a valid cell. Calling nextCell() will have no effect. - */ - AFTER_LAST - -} diff --git a/hbase-common/src/main/java/org/apache/hbase/io/CellSearcher.java b/hbase-common/src/main/java/org/apache/hbase/io/CellSearcher.java deleted file mode 100644 index a0174a5..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/CellSearcher.java +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hbase.io; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hbase.Cell; - -/** - * Search or seek Cells. 
- * Methods for seeking to a random {@link Cell} inside a sorted collection of cells. Indicates that - * the implementation is able to navigate between cells without iterating forward through every - * cell. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface CellSearcher extends ReversibleCellScanner { - /** - * Do everything within this scanner's power to find the key. Look forward and backwards. - *

- * Abort as soon as we know it can't be found, possibly leaving the Searcher in an invalid state. - *

- * @param key position the CellScanner exactly on this key - * @return true if the cell existed and getCurrentCell() holds a valid cell - */ - boolean positionAt(Cell key); - - /** - * Same as positionAt(..), but go to the extra effort of finding the previous key if there's no - * exact match. - *

- * @param key position the CellScanner on this key or the closest cell before - * @return AT if exact match
- * BEFORE if on last cell before key
- * BEFORE_FIRST if key was before the first cell in this scanner's scope - */ - CellScannerPosition positionAtOrBefore(Cell key); - - /** - * Same as positionAt(..), but go to the extra effort of finding the next key if there's no exact - * match. - *

- * @param key position the CellScanner on this key or the closest cell after - * @return AT if exact match
- * AFTER if on first cell after key
- * AFTER_LAST if key was after the last cell in this scanner's scope - */ - CellScannerPosition positionAtOrAfter(Cell key); - - /** - * Note: Added for backwards compatibility with {@link #KeyValueScanner.reseek()} - *

- * Look for the key, but only look after the current position. Probably not needed for an - * efficient tree implementation, but is important for implementations without random access such - * as unencoded KeyValue blocks. - *

- * @param key position the CellScanner exactly on this key - * @return true if getCurrent() holds a valid cell - */ - boolean seekForwardTo(Cell key); - - /** - * Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no - * exact match. - *

- * @param key - * @return AT if exact match
- * AFTER if on first cell after key
- * AFTER_LAST if key was after the last cell in this scanner's scope - */ - CellScannerPosition seekForwardToOrBefore(Cell key); - - /** - * Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no - * exact match. - *

- * @param key - * @return AT if exact match
- * AFTER if on first cell after key
- * AFTER_LAST if key was after the last cell in this scanner's scope - */ - CellScannerPosition seekForwardToOrAfter(Cell key); - - /** - * Note: This may not be appropriate to have in the interface. Need to investigate. - *

- * Position the scanner in an invalid state after the last cell: CellScannerPosition.AFTER_LAST. - * This is used by tests and for handling certain edge cases. - */ - void positionAfterLastCell(); -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/ReversibleCellScanner.java b/hbase-common/src/main/java/org/apache/hbase/io/ReversibleCellScanner.java deleted file mode 100644 index c0ba46b..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/ReversibleCellScanner.java +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hbase.io; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * An extension of {@link CellScanner} indicating the scanner supports iterating backwards through cells. - *

- * Note: This was not added to suggest that HBase should support client facing reverse Scanners, but - * because some {@link CellSearcher} implementations, namely PrefixTree, need a method of backing up - * if the positionAt(..) method goes past the requested cell. - */ -@InterfaceAudience.Private -@InterfaceStability.Unstable -public interface ReversibleCellScanner extends CellScanner { - /** - * Try to position the scanner one Cell before the current position. - * @return true if the operation was successful, meaning getCurrentCell() will return a valid - * Cell.
- * false if there were no previous cells, meaning getCurrentCell() will return null. - * Scanner position will be {@link CellScannerPosition.BEFORE_FIRST} - */ - boolean previous(); - - /** - * Try to position the scanner in the row before the current row. - * @param endOfRow true for the last cell in the previous row; false for the first cell - * @return true if the operation was successful, meaning getCurrentCell() will return a valid - * Cell.
- * false if there were no previous cells, meaning getCurrentCell() will return null. - * Scanner position will be {@link CellScannerPosition.BEFORE_FIRST} - */ - boolean previousRow(boolean endOfRow); -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/CellDecoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/CellDecoder.java deleted file mode 100644 index e26e729..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/CellDecoder.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.apache.hbase.io.codec; - -import java.io.DataInputStream; -import java.io.IOException; - -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.io.WritableUtils; -import org.apache.hbase.Cell; - -/** - * @see CellEncoder - */ -public class CellDecoder implements Decoder { - private final DataInputStream in; - // If true, this decoder is finished decoding. - private boolean finished = false; - - public CellDecoder(final DataInputStream in) { - this.in = in; - } - - @Override - public Cell decode() throws CodecException { - Cell cell = null; - if (!this.finished) { - int length; - try { - length = WritableUtils.readVInt(this.in); - } catch (IOException e) { - throw new CodecException(e); - } - if (length == CellEncoder.END_OF_CELLS) { - this.finished = true; - } else { - // Presume we are on to a cell... read it in. - try { - byte [] row = new byte [length]; - this.in.readFully(row, 0, length); - byte [] family = readByteArray(); - byte [] qualifier = readByteArray(); - long timestamp = this.in.readLong(); - byte type = this.in.readByte(); - byte [] value = readByteArray(); - // I need a Cell Factory here. Using KeyValue for now. TODO. - cell = new KeyValue(row, family, qualifier, timestamp, - KeyValue.Type.codeToType(type), value); - } catch (IOException e) { - throw new CodecException(e); - } - } - } - return cell; - } - - /** - * @return Byte array read from the stream. 
- * @throws IOException - */ - private byte [] readByteArray() throws IOException { - // TODO: A method like this belongs in CellTool? - int length = WritableUtils.readVInt(this.in); - byte [] bytes = new byte [length]; - this.in.readFully(bytes, 0, length); - return bytes; - } -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/CellEncoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/CellEncoder.java deleted file mode 100644 index 317f483..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/CellEncoder.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.apache.hbase.io.codec; - -import java.io.DataOutputStream; -import java.io.IOException; - -import org.apache.hadoop.io.WritableUtils; -import org.apache.hbase.Cell; - -/** - * Cell encoder that just writes out all the individual elements of a Cell. - * Does not write the mvcc stamp. Use a different encoder if you want that - * in the stream. - */ -public class CellEncoder implements Encoder { - // TODO: Replace WritableUtils from hadoop vint w/ our own. - // TOOD: I need a CellFactory to make Cells with. Using KeyValue for now. - - // We write out an '0' int as marker that there are no more kvs when you call flush. - static final int END_OF_CELLS = 0; - // Need to be able to write java types such as int and long so need DataOutput. - // Want to stream too so DataOutputStream. - private final DataOutputStream out; - // This encoder is 'done' once flush has been called because on flush we - // write out the END_OF_KEYVALUES marker. 
- private boolean finish = false; - - public CellEncoder(final DataOutputStream out) { - this.out = out; - } - - @Override - public Cell encode(Cell cell) throws CodecException { - if (this.finish) throw new CodecException("Finished"); - // This is crass and will not work when KV changes - try { - // Row - write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); - // Column family - write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()); - // Qualifier - write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()); - // Version - this.out.writeLong(cell.getTimestamp()); - // Type - this.out.writeByte(cell.getTypeByte()); - // Value - write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); - } catch (IOException e) { - throw new CodecException(e); - } - return cell; - } - - /** - * Write vint length followed by array bytes. - * @param bytes - * @param offset - * @param length - * @throws IOException - */ - private void write(final byte [] bytes, final int offset, final int length) - throws IOException { - WritableUtils.writeVInt(this.out, length); - this.out.write(bytes, offset, length); - } - - @Override - public void finish() throws CodecException { - if (this.finish) return; - this.finish = true; - try { - // Write out an vint whose value is zero as end of stream. 
- WritableUtils.writeVInt(this.out, END_OF_CELLS); - this.out.flush(); - } catch (IOException e) { - throw new CodecException(e); - } - } -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/CodecException.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/CodecException.java deleted file mode 100644 index 2d539ee..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/CodecException.java +++ /dev/null @@ -1,22 +0,0 @@ -package org.apache.hbase.io.codec; - -import java.io.IOException; - -public class CodecException extends IOException { - private static final long serialVersionUID = -2850095011686914405L; - - public CodecException() { - } - - public CodecException(String message) { - super(message); - } - - public CodecException(Throwable t) { - super(t); - } - - public CodecException(String message, Throwable t) { - super(message, t); - } -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/Decoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/Decoder.java deleted file mode 100644 index d9cf79b..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/Decoder.java +++ /dev/null @@ -1,15 +0,0 @@ -package org.apache.hbase.io.codec; - -import org.apache.hbase.Cell; - -/** - * Cell decoder - */ -public interface Decoder { - /** - * Decode next cell. - * @return Next decoded cell or null if finished decoding - * @throws CodecException - */ - Cell decode() throws CodecException; -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/Encoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/Encoder.java deleted file mode 100644 index e9bef80..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/Encoder.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.apache.hbase.io.codec; - -import org.apache.hbase.Cell; - -/** - * Cell encoder. 
- */ -public interface Encoder { - // TODO: org.apache.avro.io.Encoder is like this only does java primitive types, not Cells, - // but it does do 'fixed' bytes.... could serialize Cell and pass these... ugh.. - // wouldn't buy us much. - // TODO: Do a context for encoder and then another for decoder because they - // will have different stuff? Or is that something not in this Interface (I had - // it in here and then removed it all) - /** - * Implementation must copy the entire state of the Cell. If the passed Cell is modified - * immediately after the encode method returns, the modifications must have absolutely no effect - * on the copy of the Cell that was added to the encoder. - * @param cell Cell to encode. - * @return The passed cell - * @throws CodecException - */ - Cell encode(Cell cell) throws CodecException; - - /** - * Finish up the encoding. Add END-OF-ENCODING markers or flush the - * stream, etc. - * You cannot call {@link #encode(Cell)} after invoking this method. - * @throws CodecException - */ - void finish() throws CodecException; -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueDecoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueDecoder.java deleted file mode 100644 index 2eb72dc..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueDecoder.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.apache.hbase.io.codec; - -import java.io.DataInputStream; -import java.io.IOException; - -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hbase.Cell; - -/** - * @see OldSchoolKeyValueEncoder - */ -public class OldSchoolKeyValueDecoder implements Decoder { - private final DataInputStream in; - // If true, this decoder is finished decoding. 
- private boolean finished = false; - - public OldSchoolKeyValueDecoder(final DataInputStream in) { - this.in = in; - } - - @Override - public Cell decode() throws CodecException { - Cell cell = null; - if (!this.finished) { - int length; - try { - length = this.in.readInt(); - } catch (IOException e) { - throw new CodecException(e); - } - if (length == OldSchoolKeyValueEncoder.END_OF_KEYVALUES) { - this.finished = true; - } else { - try { - cell = KeyValue.create(length, this.in); - } catch (IOException e) { - throw new CodecException(e); - } - } - } - return cell; - } -} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueEncoder.java b/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueEncoder.java deleted file mode 100644 index dffac45..0000000 --- a/hbase-common/src/main/java/org/apache/hbase/io/codec/OldSchoolKeyValueEncoder.java +++ /dev/null @@ -1,68 +0,0 @@ -package org.apache.hbase.io.codec; - -import java.io.DataOutputStream; -import java.io.IOException; - -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hbase.Cell; - -/** - * Encodes by casting Cell to KeyValue and writing out the backing array with a length prefix. - * This is how KVs were serialized in Puts, Deletes and Results pre-0.96. Its what would - * happen if you called the Writable#write KeyValue implementation. This encoder will fail - * if the passed Cell is not an old school pre-0.96 KeyValue. Does not copy bytes writing. - * It just writes them direct to the passed stream. When {@link #finish} is called, - * we write out an End-Of-KeyValues marker to the stream. If you wrote two KeyValues to - * this encoder, it would look like this in the stream: - *

- * length-of-KeyValue1 // A java int with the length of KeyValue1 backing array
- * KeyValue1 backing array
- * length-of-KeyValue2
- * KeyValue2 backing array
- * length-of-zero // A java int whose value is 0; marks end of the encoded section
- * 
- * @see OldSchoolKeyValueDecoder - */ -public class OldSchoolKeyValueEncoder implements Encoder { - // We write out an '0' int as marker that there are no more kvs when you call flush. - static final int END_OF_KEYVALUES = 0; - // Need to be able to write java types such as int and long so need DataOutput. - // Want to stream too so DataOutputStream. - private final DataOutputStream out; - // This encoder is 'done' once flush has been called because on flush we - // write out the END_OF_KEYVALUES marker. - private boolean finish = false; - - public OldSchoolKeyValueEncoder(final DataOutputStream out) { - this.out = out; - } - - @Override - public Cell encode(Cell cell) throws CodecException { - if (this.finish) throw new CodecException("Finished"); - // This is crass and will not work when KV changes - try { - KeyValue.write((KeyValue)cell, this.out); - } catch (IOException e) { - throw new CodecException(e); - } - return cell; - } - - /** - * Calling flush 'finishes' this encoder. Subsequent calls - * to {@link #write(Cell)} will throw exception. - */ - @Override - public void finish() throws CodecException { - if (this.finish) return; - this.finish = true; - // Write out an int whose value is zero as end of stream. 
- try { - this.out.writeInt(END_OF_KEYVALUES); - this.out.flush(); - } catch (IOException e) { - throw new CodecException(e); - } - } -} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java b/hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java new file mode 100644 index 0000000..d83b01b --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java @@ -0,0 +1,93 @@ +package org.apache.hbase.codec; + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.Cell; +import org.apache.hbase.CellComparator; +import org.apache.hbase.codec.CellDecoder; +import org.apache.hbase.codec.CellEncoder; +import org.junit.Test; + +import com.google.common.io.CountingInputStream; +import com.google.common.io.CountingOutputStream; + +public class TestCellCodec { + + @Test + public void testEmptyWorks() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + CellEncoder encoder = new CellEncoder(dos); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + assertEquals(0 + 1 /*CellEncoder.END_OF_CELLS*/, offset); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + CellDecoder decoder = new CellDecoder(dis); + assertFalse(decoder.next()); + dis.close(); + assertEquals(0 + 1 /*CellEncoder.END_OF_CELLS*/, cis.getCount()); + } + + @Test + public void testOne() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new 
CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + CellEncoder encoder = new CellEncoder(dos); + final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); + encoder.write(kv); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + CellDecoder decoder = new CellDecoder(dis); + assertTrue(decoder.next()); // First read should pull in the KV + assertFalse(decoder.next()); // Second read should trip over the end-of-stream marker and return false + dis.close(); + assertEquals(offset, cis.getCount()); + } + + @Test + public void testThree() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + CellEncoder encoder = new CellEncoder(dos); + final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1")); + final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2")); + final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3")); + encoder.write(kv1); + encoder.write(kv2); + encoder.write(kv3); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + CellDecoder decoder = new CellDecoder(dis); + assertTrue(decoder.next()); + Cell c = decoder.getCurrent(); + assertTrue(CellComparator.equals(c, kv1)); + assertTrue(decoder.next()); + c = decoder.getCurrent(); + assertTrue(CellComparator.equals(c, kv2)); + assertTrue(decoder.next()); + c = decoder.getCurrent(); + 
assertTrue(CellComparator.equals(c, kv3)); + assertFalse(decoder.next()); + dis.close(); + assertEquals(offset, cis.getCount()); + } +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hbase/codec/TestOldSchoolKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hbase/codec/TestOldSchoolKeyValueCodec.java new file mode 100644 index 0000000..94b1d18 --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hbase/codec/TestOldSchoolKeyValueCodec.java @@ -0,0 +1,98 @@ +package org.apache.hbase.codec; + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.codec.OldSchoolKeyValueDecoder; +import org.apache.hbase.codec.OldSchoolKeyValueEncoder; +import org.junit.Test; + +import com.google.common.io.CountingInputStream; +import com.google.common.io.CountingOutputStream; + +public class TestOldSchoolKeyValueCodec { + + @Test + public void testEmptyWorks() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + assertEquals(0 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); + assertFalse(decoder.next()); + dis.close(); + assertEquals(0 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, + cis.getCount()); + } + + @Test + public void testOne() 
throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); + final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); + final long length = kv.getLength() + Bytes.SIZEOF_INT; + encoder.write(kv); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + assertEquals(length + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); + assertTrue(decoder.next()); // First read should pull in the KV + assertFalse(decoder.next()); // Second read should trip over the end-of-stream marker and return false + dis.close(); + assertEquals(length + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, + cis.getCount()); + } + + @Test + public void testThree() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + CountingOutputStream cos = new CountingOutputStream(baos); + DataOutputStream dos = new DataOutputStream(cos); + OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); + final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1")); + final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2")); + final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3")); + final long length = kv1.getLength() + Bytes.SIZEOF_INT; + encoder.write(kv1); + encoder.write(kv2); + encoder.write(kv3); + encoder.flush(); + dos.close(); + long offset = cos.getCount(); + 
assertEquals(length * 3 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); + CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); + DataInputStream dis = new DataInputStream(cis); + OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); + assertTrue(decoder.next()); + KeyValue kv = (KeyValue)decoder.getCurrent(); + assertTrue(kv1.equals(kv)); + assertTrue(decoder.next()); + kv = (KeyValue)decoder.getCurrent(); + assertTrue(kv2.equals(kv)); + assertTrue(decoder.next()); + kv = (KeyValue)decoder.getCurrent(); + assertTrue(kv3.equals(kv)); + assertFalse(decoder.next()); + dis.close(); + assertEquals((length * 3) + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, + cis.getCount()); + } +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hbase/io/codec/TestCellCodec.java b/hbase-common/src/test/java/org/apache/hbase/io/codec/TestCellCodec.java deleted file mode 100644 index 2cd6355..0000000 --- a/hbase-common/src/test/java/org/apache/hbase/io/codec/TestCellCodec.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.apache.hbase.io.codec; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hbase.Cell; -import org.apache.hbase.CellComparator; -import org.junit.Test; - -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; - -public class TestCellCodec { - - @Test - public void testEmptyWorks() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - CellEncoder encoder = new CellEncoder(dos); - 
encoder.finish(); - dos.close(); - long offset = cos.getCount(); - assertEquals(0 + 1 /*CellEncoder.END_OF_CELLS*/, offset); - CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - CellDecoder decoder = new CellDecoder(dis); - assertTrue(decoder.decode() == null); - dis.close(); - assertEquals(0 + 1 /*CellEncoder.END_OF_CELLS*/, cis.getCount()); - } - - @Test - public void testOne() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - CellEncoder encoder = new CellEncoder(dos); - final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); - encoder.encode(kv); - encoder.finish(); - dos.close(); - long offset = cos.getCount(); - CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - CellDecoder decoder = new CellDecoder(dis); - assertTrue(decoder.decode() != null); // First read should pull in the KV - assertTrue(decoder.decode() == null); // Second read should trip over the end-of-stream marker and return false - dis.close(); - assertEquals(offset, cis.getCount()); - } - - @Test - public void testThree() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - CellEncoder encoder = new CellEncoder(dos); - final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1")); - final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2")); - final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3")); - 
encoder.encode(kv1); - encoder.encode(kv2); - encoder.encode(kv3); - encoder.finish(); - dos.close(); - long offset = cos.getCount(); - CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - CellDecoder decoder = new CellDecoder(dis); - Cell c = decoder.decode(); - assertTrue(CellComparator.equals(c, kv1)); - c = decoder.decode(); - assertTrue(CellComparator.equals(c, kv2)); - c = decoder.decode(); - assertTrue(CellComparator.equals(c, kv3)); - assertTrue(decoder.decode() == null); - dis.close(); - assertEquals(offset, cis.getCount()); - } -} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hbase/io/codec/TestOldSchoolKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hbase/io/codec/TestOldSchoolKeyValueCodec.java deleted file mode 100644 index 6af2542..0000000 --- a/hbase-common/src/test/java/org/apache/hbase/io/codec/TestOldSchoolKeyValueCodec.java +++ /dev/null @@ -1,93 +0,0 @@ -package org.apache.hbase.io.codec; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Test; - -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; - -public class TestOldSchoolKeyValueCodec { - - @Test - public void testEmptyWorks() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); - encoder.finish(); - dos.close(); - long offset = cos.getCount(); - assertEquals(0 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); 
- CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); - assertTrue(decoder.decode() == null); - dis.close(); - assertEquals(0 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, - cis.getCount()); - } - - @Test - public void testOne() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); - final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); - final long length = kv.getLength() + Bytes.SIZEOF_INT; - encoder.encode(kv); - encoder.finish(); - dos.close(); - long offset = cos.getCount(); - assertEquals(length + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); - CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); - assertTrue(decoder.decode() != null); // First read should pull in the KV - assertTrue(decoder.decode() == null); // Second read should trip over the end-of-stream marker and return false - dis.close(); - assertEquals(length + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, - cis.getCount()); - } - - @Test - public void testThree() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CountingOutputStream cos = new CountingOutputStream(baos); - DataOutputStream dos = new DataOutputStream(cos); - OldSchoolKeyValueEncoder encoder = new OldSchoolKeyValueEncoder(dos); - final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1")); - 
final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2")); - final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3")); - final long length = kv1.getLength() + Bytes.SIZEOF_INT; - encoder.encode(kv1); - encoder.encode(kv2); - encoder.encode(kv3); - encoder.finish(); - dos.close(); - long offset = cos.getCount(); - assertEquals(length * 3 + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, offset); - CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); - DataInputStream dis = new DataInputStream(cis); - OldSchoolKeyValueDecoder decoder = new OldSchoolKeyValueDecoder(dis); - KeyValue kv = (KeyValue)decoder.decode(); - assertTrue(kv1.equals(kv)); - kv = (KeyValue)decoder.decode(); - assertTrue(kv2.equals(kv)); - kv = (KeyValue)decoder.decode(); - assertTrue(kv3.equals(kv)); - assertTrue(decoder.decode() == null); - dis.close(); - assertEquals((length * 3) + Bytes.SIZEOF_INT /*OldSchoolKeyValueEncoder.END_OF_KEYVALUES*/, - cis.getCount()); - } -} \ No newline at end of file