in
+ * @param in Where to read bytes from. Creates a byte array to hold the KeyValue
+ * backing bytes copied from the stream.
+ * @return KeyValue created by deserializing from in OR if we find a length
+ * of zero, we will return null, which can be useful for marking a stream as done.
* @throws IOException
*/
public static KeyValue create(final DataInput in) throws IOException {
@@ -2311,10 +2316,12 @@
* Create a KeyValue reading length from in
* @param length
* @param in
- * @return Created KeyValue
+ * @return Created KeyValue OR if we find a length of zero, we will return null, which
+ * can be useful for marking a stream as done.
* @throws IOException
*/
public static KeyValue create(int length, final DataInput in) throws IOException {
+ if (length == 0) return null;
// This is how the old Writables.readFrom used to deserialize. Didn't even vint.
byte [] bytes = new byte[length];
in.readFully(bytes);
@@ -2322,6 +2329,23 @@
}
/**
+ * Create a KeyValue reading from the raw InputStream.
+ * @param in
+ * @return Created KeyValue OR if we find a length of zero, we will return null, which
+ * can be useful for marking a stream as done.
+ * @throws IOException
+ */
+ public static KeyValue lowLevelCreate(final InputStream in) throws IOException {
+ byte [] intBytes = new byte[Bytes.SIZEOF_INT];
+ int length = in.read(intBytes);
+ // read returns -1 at EOF (never 0 for a non-empty buffer), so treat <= 0 as stream done.
+ if (length <= 0) return null;
+ if (length != intBytes.length) throw new IOException("Failed read of int length " + length);
+ byte [] bytes = new byte[Bytes.toInt(intBytes)];
+ IOUtils.readFully(in, bytes, 0, bytes.length);
+ return new KeyValue(bytes, 0, bytes.length);
+ }
+
+ /**
* Write out a KeyValue in the manner in which we used to when KeyValue was a Writable.
* @param kv
* @param out
@@ -2339,6 +2363,24 @@
}
/**
+ * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable, but
+ * without requiring a {@link DataOutput}; just takes a plain {@link OutputStream}.
+ * @param kv
+ * @param out
+ * @return Length written on stream
+ * @throws IOException
+ * @see #create(DataInput) for the inverse function
+ * @see #write(KeyValue, DataOutput)
+ */
+ public static long lowLevelWrite(final KeyValue kv, final OutputStream out) throws IOException {
+ int length = kv.getLength();
+ // This does the same as DataOutput#writeInt (big-endian, etc.)
+ out.write(Bytes.toBytes(length));
+ out.write(kv.getBuffer(), kv.getOffset(), length);
+ return length + Bytes.SIZEOF_INT;
+ }
+
+ /**
* Compare key portion of a {@link KeyValue} for keys in -ROOT-
* table.
*/
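Taken together, the new stream methods define a simple wire format: a 4-byte big-endian length followed by the KeyValue backing bytes. A minimal round-trip sketch, assuming the lowLevelWrite/lowLevelCreate methods added above and KeyValue's (row, family, qualifier, value) convenience constructor:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class LowLevelRoundTrip {
  public static void main(String[] args) throws Exception {
    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Writes a 4-byte big-endian length prefix, then the backing array.
    long written = KeyValue.lowLevelWrite(kv, out);
    System.out.println(written == kv.getLength() + Bytes.SIZEOF_INT); // true
    // lowLevelCreate is the inverse; it returns null once the stream is done.
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    KeyValue read = KeyValue.lowLevelCreate(in);
    System.out.println(kv.equals(read));             // true
    System.out.println(KeyValue.lowLevelCreate(in)); // null: stream exhausted
  }
}
```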
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java (working copy)
@@ -26,7 +26,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.hbase.util.Strings;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import com.google.common.collect.Lists;
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTool.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTool.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTool.java (working copy)
@@ -28,7 +28,7 @@
import org.apache.hadoop.hbase.util.IterableUtils;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellTool;
+import org.apache.hbase.CellTool;
/**
* static convenience methods for dealing with KeyValues and collections of KeyValues
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (working copy)
@@ -628,7 +628,9 @@
}
/**
- * Convert an int value to a byte array
+ * Convert an int value to a byte array. Big-endian. Same as what DataOutputStream.writeInt
+ * does.
+ *
* @param val value
* @return the byte array
*/
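A quick sketch verifying the equivalence the amended javadoc claims, using the standard JDK DataOutputStream:

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.util.Arrays;

import org.apache.hadoop.hbase.util.Bytes;

public class IntEncodingCheck {
  public static void main(String[] args) throws Exception {
    int val = 12345678;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    new DataOutputStream(baos).writeInt(val); // big-endian per the DataOutput contract
    // Bytes.toBytes(int) yields the same four bytes, so streams written with one
    // can be read back with the other.
    System.out.println(Arrays.equals(baos.toByteArray(), Bytes.toBytes(val))); // true
  }
}
```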
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java (working copy)
@@ -16,7 +16,6 @@
*/
package org.apache.hadoop.hbase.util.test;
-import java.util.Map;
import java.util.Random;
import org.apache.hadoop.hbase.util.Bytes;
Index: hbase-common/src/main/java/org/apache/hbase/Cell.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/Cell.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hbase/Cell.java (working copy)
@@ -20,7 +20,6 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hbase.cell.CellTool;
/**
Index: hbase-common/src/main/java/org/apache/hbase/CellComparator.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/CellComparator.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/CellComparator.java (working copy)
@@ -0,0 +1,194 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase;
+
+import java.io.Serializable;
+import java.util.Comparator;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.google.common.primitives.Longs;
+
+/**
+ * Compare two traditional HBase cells.
+ *
+ * Note: This comparator is not valid for -ROOT- and .META. tables.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class CellComparator implements Comparator<Cell>, Serializable{
+ private static final long serialVersionUID = -8760041766259623329L;
+
+ @Override
+ public int compare(Cell a, Cell b) {
+ return compareStatic(a, b);
+ }
+
+
+ public static int compareStatic(Cell a, Cell b) {
+ //row
+ int c = Bytes.compareTo(
+ a.getRowArray(), a.getRowOffset(), a.getRowLength(),
+ b.getRowArray(), b.getRowOffset(), b.getRowLength());
+ if (c != 0) return c;
+
+ //family
+ c = Bytes.compareTo(
+ a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
+ b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
+ if (c != 0) return c;
+
+ //qualifier
+ c = Bytes.compareTo(
+ a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
+ b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
+ if (c != 0) return c;
+
+ //timestamp: later sorts first
+ c = -Longs.compare(a.getTimestamp(), b.getTimestamp());
+ if (c != 0) return c;
+
+ //type
+ c = (0xff & a.getTypeByte()) - (0xff & b.getTypeByte());
+ if (c != 0) return c;
+
+ //mvccVersion: later sorts first
+ return -Longs.compare(a.getMvccVersion(), b.getMvccVersion());
+ }
+
+
+ /**************** equals ****************************/
+
+ public static boolean equals(Cell a, Cell b){
+ return equalsRow(a, b)
+ && equalsFamily(a, b)
+ && equalsQualifier(a, b)
+ && equalsTimestamp(a, b)
+ && equalsType(a, b);
+ }
+
+ public static boolean equalsRow(Cell a, Cell b){
+ return Bytes.equals(
+ a.getRowArray(), a.getRowOffset(), a.getRowLength(),
+ b.getRowArray(), b.getRowOffset(), b.getRowLength());
+ }
+
+ public static boolean equalsFamily(Cell a, Cell b){
+ return Bytes.equals(
+ a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
+ b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
+ }
+
+ public static boolean equalsQualifier(Cell a, Cell b){
+ return Bytes.equals(
+ a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
+ b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
+ }
+
+ public static boolean equalsTimestamp(Cell a, Cell b){
+ return a.getTimestamp() == b.getTimestamp();
+ }
+
+ public static boolean equalsType(Cell a, Cell b){
+ return a.getTypeByte() == b.getTypeByte();
+ }
+
+
+ /********************* hashCode ************************/
+
+ /**
+ * Returns a hash code that is always the same for two Cells having a matching equals(..) result.
+ * Currently does not guard against nulls, but it could if necessary.
+ */
+ public static int hashCode(Cell cell){
+ if (cell == null) {// return 0 for empty Cell
+ return 0;
+ }
+
+ //pre-calculate the 3 hashes made of byte ranges
+ int rowHash = Bytes.hashCode(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+ int familyHash = Bytes.hashCode(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
+ int qualifierHash = Bytes.hashCode(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+
+ //combine the 6 sub-hashes
+ int hash = 31 * rowHash + familyHash;
+ hash = 31 * hash + qualifierHash;
+ hash = 31 * hash + (int)cell.getTimestamp();
+ hash = 31 * hash + cell.getTypeByte();
+ hash = 31 * hash + (int)cell.getMvccVersion();
+ return hash;
+ }
+
+
+ /******************** lengths *************************/
+
+ public static boolean areKeyLengthsEqual(Cell a, Cell b) {
+ return a.getRowLength() == b.getRowLength()
+ && a.getFamilyLength() == b.getFamilyLength()
+ && a.getQualifierLength() == b.getQualifierLength();
+ }
+
+ public static boolean areRowLengthsEqual(Cell a, Cell b) {
+ return a.getRowLength() == b.getRowLength();
+ }
+
+
+ /***************** special cases ****************************/
+
+ /**
+ * special case for KeyValue.equals
+ */
+ private static int compareStaticIgnoreMvccVersion(Cell a, Cell b) {
+ //row
+ int c = Bytes.compareTo(
+ a.getRowArray(), a.getRowOffset(), a.getRowLength(),
+ b.getRowArray(), b.getRowOffset(), b.getRowLength());
+ if (c != 0) return c;
+
+ //family
+ c = Bytes.compareTo(
+ a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
+ b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
+ if (c != 0) return c;
+
+ //qualifier
+ c = Bytes.compareTo(
+ a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
+ b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
+ if (c != 0) return c;
+
+ //timestamp: later sorts first
+ c = -Longs.compare(a.getTimestamp(), b.getTimestamp());
+ if (c != 0) return c;
+
+ //type
+ c = (0xff & a.getTypeByte()) - (0xff & b.getTypeByte());
+ return c;
+ }
+
+ /**
+ * special case for KeyValue.equals
+ */
+ public static boolean equalsIgnoreMvccVersion(Cell a, Cell b){
+ return 0 == compareStaticIgnoreMvccVersion(a, b);
+ }
+
+}
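To illustrate the ordering the new comparator encodes (row, then family, then qualifier, with later timestamps sorting first), a small sketch using KeyValue as the Cell implementation:

```java
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;

public class CellComparatorExample {
  public static void main(String[] args) {
    Cell newer = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 2L, Bytes.toBytes("v2"));
    Cell older = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v1"));
    // Identical coordinates except timestamp: the later edit sorts first.
    System.out.println(CellComparator.compareStatic(newer, older) < 0); // true
    // The piecewise helpers compare individual components only.
    System.out.println(CellComparator.equalsRow(newer, older));         // true
    System.out.println(CellComparator.equalsTimestamp(newer, older));   // false
  }
}
```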
Index: hbase-common/src/main/java/org/apache/hbase/CellScanner.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/CellScanner.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/CellScanner.java (working copy)
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hbase.Cell;
+
+/**
+ * An interface for iterating through a sequence of cells. Similar to Java's Iterator, but without
+ * the hasNext() or remove() methods. The hasNext() method is problematic because it may require
+ * actually loading the next object, which in turn requires storing the previous object somewhere.
+ *
+ * The core data block decoder should be as fast as possible, so we push the complexity and
+ * performance expense of concurrently tracking multiple cells to layers above the CellScanner.
+ *
+ * The {@link #get()} method will return a reference to a Cell implementation. This reference may
+ * or may not point to a reusable cell implementation, so users of the CellScanner should not, for
+ * example, accumulate a List of Cells. All of the references may point to the same object, which
+ * would be the latest state of the underlying Cell. In short, the Cell is mutable. Use
+ * {@link #getDeepCopy()} if you want a Cell that does not hold references (if the data is
+ * encoded or compressed, the call to {@link #getDeepCopy()} will cost more than a {@link #get()}).
+ *
+ * Typical usage:
+ *
+ * <pre>
+ * while (scanner.next()) {
+ *   Cell cell = scanner.get();
+ *   // do something
+ * }
+ * </pre>
+ *
+ * Often used reading {@link Cell}s written by {@link org.apache.hbase.io.CellOutputStream}.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface CellScanner {
+ /**
+ * @return the current Cell which may be mutable
+ */
+ Cell get();
+
+ /**
+ * @return Get a deep copy of the current Cell or null if no current Cell
+ */
+ Cell getDeepCopy();
+
+ /**
+ * Advance the scanner 1 cell.
+ * @return true if the next cell is found and {@link #get()} will return a valid Cell
+ */
+ boolean next();
+}
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hbase/CellScannerable.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/CellScannerable.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/CellScannerable.java (working copy)
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase;
+
+/**
+ * Implementer can return a CellScanner over its Cell content.
+ * Class name is ugly but it mimics java.util.Iterable, only here we are about the dumber
+ * CellScanner rather than, say, Iterator. See the CellScanner class comment for why we go
+ * dumber than java.util.Iterator.
+ */
+public interface CellScannerable {
+ /**
+ * @return A CellScanner over the contained {@link Cell}s
+ */
+ CellScanner cellScanner();
+}
\ No newline at end of file
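A minimal sketch of an implementer, assuming a hypothetical list-backed holder that delegates to the CellTool.createCellScanner(Iterator) factory added later in this patch:

```java
import java.util.List;

import org.apache.hbase.Cell;
import org.apache.hbase.CellScanner;
import org.apache.hbase.CellScannerable;
import org.apache.hbase.CellTool;

/** Hypothetical list-backed holder exposing its content as a CellScanner. */
public class CellList implements CellScannerable {
  private final List<Cell> cells;

  public CellList(final List<Cell> cells) {
    this.cells = cells;
  }

  @Override
  public CellScanner cellScanner() {
    // Delegate to the factory rather than hand-rolling the scanner.
    return CellTool.createCellScanner(this.cells.iterator());
  }
}
```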
Index: hbase-common/src/main/java/org/apache/hbase/CellTool.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/CellTool.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/CellTool.java (working copy)
@@ -0,0 +1,260 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase;
+
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.NavigableMap;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.ByteRange;
+import org.apache.hadoop.hbase.KeyValue;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class CellTool {
+
+ /******************* ByteRange *******************************/
+
+ public static ByteRange fillRowRange(Cell cell, ByteRange range) {
+ return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+ }
+
+ public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
+ return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
+ }
+
+ public static ByteRange fillQualifierRange(Cell cell, ByteRange range) {
+ return range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
+ cell.getQualifierLength());
+ }
+
+
+ /***************** get individual arrays for tests ************/
+
+ public static byte[] getRowArray(Cell cell){
+ byte[] output = new byte[cell.getRowLength()];
+ copyRowTo(cell, output, 0);
+ return output;
+ }
+
+ public static byte[] getFamilyArray(Cell cell){
+ byte[] output = new byte[cell.getFamilyLength()];
+ copyFamilyTo(cell, output, 0);
+ return output;
+ }
+
+ public static byte[] getQualifierArray(Cell cell){
+ byte[] output = new byte[cell.getQualifierLength()];
+ copyQualifierTo(cell, output, 0);
+ return output;
+ }
+
+ public static byte[] getValueArray(Cell cell){
+ byte[] output = new byte[cell.getValueLength()];
+ copyValueTo(cell, output, 0);
+ return output;
+ }
+
+
+ /******************** copyTo **********************************/
+
+ public static int copyRowTo(Cell cell, byte[] destination, int destinationOffset) {
+ System.arraycopy(cell.getRowArray(), cell.getRowOffset(), destination, destinationOffset,
+ cell.getRowLength());
+ return destinationOffset + cell.getRowLength();
+ }
+
+ public static int copyFamilyTo(Cell cell, byte[] destination, int destinationOffset) {
+ System.arraycopy(cell.getFamilyArray(), cell.getFamilyOffset(), destination, destinationOffset,
+ cell.getFamilyLength());
+ return destinationOffset + cell.getFamilyLength();
+ }
+
+ public static int copyQualifierTo(Cell cell, byte[] destination, int destinationOffset) {
+ System.arraycopy(cell.getQualifierArray(), cell.getQualifierOffset(), destination,
+ destinationOffset, cell.getQualifierLength());
+ return destinationOffset + cell.getQualifierLength();
+ }
+
+ public static int copyValueTo(Cell cell, byte[] destination, int destinationOffset) {
+ System.arraycopy(cell.getValueArray(), cell.getValueOffset(), destination, destinationOffset,
+ cell.getValueLength());
+ return destinationOffset + cell.getValueLength();
+ }
+
+
+ /********************* misc *************************************/
+
+ public static byte getRowByte(Cell cell, int index) {
+ return cell.getRowArray()[cell.getRowOffset() + index];
+ }
+
+ public static ByteBuffer getValueBufferShallowCopy(Cell cell) {
+ ByteBuffer buffer = ByteBuffer.wrap(cell.getValueArray(), cell.getValueOffset(),
+ cell.getValueLength());
+// buffer.position(buffer.limit());//make it look as if value was appended
+ return buffer;
+ }
+
+ public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
+ final long timestamp, final byte type, final byte [] value) {
+ // I need a Cell Factory here. Using KeyValue for now. TODO.
+ // TODO: Make a new Cell implementation that just carries these
+ // byte arrays.
+ return new KeyValue(row, family, qualifier, timestamp,
+ KeyValue.Type.codeToType(type), value);
+ }
+
+ /**
+ * @param cellScannerables
+ * @return CellScanner interface over cellScannerables
+ */
+ public static CellScanner createCellScanner(final List<CellScannerable> cellScannerables) {
+ return new CellScanner() {
+ private final Iterator<CellScannerable> iterator = cellScannerables.iterator();
+ private CellScanner cellScanner = null;
+
+ @Override
+ public Cell get() {
+ return this.cellScanner != null? this.cellScanner.get(): null;
+ }
+
+ @Override
+ public Cell getDeepCopy() {
+ // TODO: Fix to do a deep copy. Does the CellIterable passed have to be against deep copies?
+ return get();
+ }
+
+ @Override
+ public boolean next() {
+ if (this.cellScanner == null) {
+ if (!this.iterator.hasNext()) return false;
+ this.cellScanner = this.iterator.next().cellScanner();
+ }
+ if (this.cellScanner.next()) return true;
+ this.cellScanner = null;
+ return next();
+ }
+ };
+ }
+
+ /**
+ * @param cellIterable
+ * @return CellScanner interface over cellIterable
+ */
+ public static CellScanner createCellScanner(final Iterable<Cell> cellIterable) {
+ return createCellScanner(cellIterable.iterator());
+ }
+
+ /**
+ * @param cells
+ * @return CellScanner interface over cells
+ */
+ public static CellScanner createCellScanner(final Iterator<Cell> cells) {
+ return new CellScanner() {
+ private final Iterator<Cell> iterator = cells;
+ private Cell current = null;
+
+ @Override
+ public Cell get() {
+ // Return the current Cell without advancing; advancing belongs in next().
+ return this.current;
+ }
+
+ @Override
+ public Cell getDeepCopy() {
+ // TODO: Fix to do a deep copy. Does the CellIterable passed have to be against deep copies?
+ return get();
+ }
+
+ @Override
+ public boolean next() {
+ if (!this.iterator.hasNext()) return false;
+ this.current = this.iterator.next();
+ return true;
+ }
+ };
+ }
+
+ /**
+ * @param cellArray
+ * @return CellScanner interface over cellArray
+ */
+ public static CellScanner createCellScanner(final Cell[] cellArray) {
+ return new CellScanner() {
+ private final Cell [] cells = cellArray;
+ private int index = -1;
+
+ @Override
+ public Cell get() {
+ return this.cells[index];
+ }
+
+ @Override
+ public Cell getDeepCopy() {
+ // TODO: Fix to do a deep copy. Does the CellIterable passed have to be against deep copies?
+ return get();
+ }
+
+ @Override
+ public boolean next() {
+ return ++index < this.cells.length;
+ }
+ };
+ }
+
+ /**
+ * Flatten the map of cells out under the CellScanner
+ * @param map
+ * @return CellScanner interface over the map's cells
+ */
+ public static CellScanner createCellScanner(final NavigableMap<byte [], List<Cell>> map) {
+ return new CellScanner() {
+ private final Iterator<Entry<byte [], List<Cell>>> entries = map.entrySet().iterator();
+ private Iterator<Cell> currentIterator = null;
+ private Cell currentCell;
+
+ @Override
+ public Cell get() {
+ return this.currentCell;
+ }
+
+ @Override
+ public Cell getDeepCopy() {
+ // TODO: Fix to do a deep copy. Does the CellIterable passed have to be against deep copies?
+ return get();
+ }
+
+ @Override
+ public boolean next() {
+ if (this.currentIterator == null) {
+ if (!this.entries.hasNext()) return false;
+ this.currentIterator = this.entries.next().getValue().iterator();
+ }
+ if (this.currentIterator.hasNext()) {
+ this.currentCell = this.currentIterator.next();
+ return true;
+ }
+ this.currentCell = null;
+ this.currentIterator = null;
+ return next();
+ }
+ };
+ }
+}
\ No newline at end of file
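The copy helpers return the offset just past the bytes they wrote, so calls chain naturally when packing several components into one buffer; a sketch:

```java
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellTool;

public class CopyToExample {
  public static void main(String[] args) {
    Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("qual"), Bytes.toBytes("value"));
    byte[] buf = new byte[cell.getRowLength() + cell.getFamilyLength()
        + cell.getQualifierLength()];
    // Each copyXTo returns destinationOffset + lengthCopied, feeding the next call.
    int off = CellTool.copyRowTo(cell, buf, 0);
    off = CellTool.copyFamilyTo(cell, buf, off);
    off = CellTool.copyQualifierTo(cell, buf, off);
    System.out.println(Bytes.toString(buf)); // "rowfamqual"
    // The getXArray variants allocate a fresh array per call; handy in tests.
    System.out.println(Bytes.toString(CellTool.getValueArray(cell))); // "value"
  }
}
```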
Index: hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java (working copy)
@@ -1,195 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hbase.cell;
-
-import java.io.Serializable;
-import java.util.Comparator;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.Cell;
-
-import com.google.common.primitives.Longs;
-
-/**
- * Compare two traditional HBase cells.
- *
- * Note: This comparator is not valid for -ROOT- and .META. tables.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class CellComparator implements Comparator<Cell>, Serializable{
- private static final long serialVersionUID = -8760041766259623329L;
-
- @Override
- public int compare(Cell a, Cell b) {
- return compareStatic(a, b);
- }
-
-
- public static int compareStatic(Cell a, Cell b) {
- //row
- int c = Bytes.compareTo(
- a.getRowArray(), a.getRowOffset(), a.getRowLength(),
- b.getRowArray(), b.getRowOffset(), b.getRowLength());
- if (c != 0) return c;
-
- //family
- c = Bytes.compareTo(
- a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
- b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
- if (c != 0) return c;
-
- //qualifier
- c = Bytes.compareTo(
- a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
- b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
- if (c != 0) return c;
-
- //timestamp: later sorts first
- c = -Longs.compare(a.getTimestamp(), b.getTimestamp());
- if (c != 0) return c;
-
- //type
- c = (0xff & a.getTypeByte()) - (0xff & b.getTypeByte());
- if (c != 0) return c;
-
- //mvccVersion: later sorts first
- return -Longs.compare(a.getMvccVersion(), b.getMvccVersion());
- }
-
-
- /**************** equals ****************************/
-
- public static boolean equals(Cell a, Cell b){
- return equalsRow(a, b)
- && equalsFamily(a, b)
- && equalsQualifier(a, b)
- && equalsTimestamp(a, b)
- && equalsType(a, b);
- }
-
- public static boolean equalsRow(Cell a, Cell b){
- return Bytes.equals(
- a.getRowArray(), a.getRowOffset(), a.getRowLength(),
- b.getRowArray(), b.getRowOffset(), b.getRowLength());
- }
-
- public static boolean equalsFamily(Cell a, Cell b){
- return Bytes.equals(
- a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
- b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
- }
-
- public static boolean equalsQualifier(Cell a, Cell b){
- return Bytes.equals(
- a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
- b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
- }
-
- public static boolean equalsTimestamp(Cell a, Cell b){
- return a.getTimestamp() == b.getTimestamp();
- }
-
- public static boolean equalsType(Cell a, Cell b){
- return a.getTypeByte() == b.getTypeByte();
- }
-
-
- /********************* hashCode ************************/
-
- /**
- * Returns a hash code that is always the same for two Cells having a matching equals(..) result.
- * Currently does not guard against nulls, but it could if necessary.
- */
- public static int hashCode(Cell cell){
- if (cell == null) {// return 0 for empty Cell
- return 0;
- }
-
- //pre-calculate the 3 hashes made of byte ranges
- int rowHash = Bytes.hashCode(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
- int familyHash = Bytes.hashCode(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
- int qualifierHash = Bytes.hashCode(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
-
- //combine the 6 sub-hashes
- int hash = 31 * rowHash + familyHash;
- hash = 31 * hash + qualifierHash;
- hash = 31 * hash + (int)cell.getTimestamp();
- hash = 31 * hash + cell.getTypeByte();
- hash = 31 * hash + (int)cell.getMvccVersion();
- return hash;
- }
-
-
- /******************** lengths *************************/
-
- public static boolean areKeyLengthsEqual(Cell a, Cell b) {
- return a.getRowLength() == b.getRowLength()
- && a.getFamilyLength() == b.getFamilyLength()
- && a.getQualifierLength() == b.getQualifierLength();
- }
-
- public static boolean areRowLengthsEqual(Cell a, Cell b) {
- return a.getRowLength() == b.getRowLength();
- }
-
-
- /***************** special cases ****************************/
-
- /**
- * special case for KeyValue.equals
- */
- private static int compareStaticIgnoreMvccVersion(Cell a, Cell b) {
- //row
- int c = Bytes.compareTo(
- a.getRowArray(), a.getRowOffset(), a.getRowLength(),
- b.getRowArray(), b.getRowOffset(), b.getRowLength());
- if (c != 0) return c;
-
- //family
- c = Bytes.compareTo(
- a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
- b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
- if (c != 0) return c;
-
- //qualifier
- c = Bytes.compareTo(
- a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
- b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
- if (c != 0) return c;
-
- //timestamp: later sorts first
- c = -Longs.compare(a.getTimestamp(), b.getTimestamp());
- if (c != 0) return c;
-
- //type
- c = (0xff & a.getTypeByte()) - (0xff & b.getTypeByte());
- return c;
- }
-
- /**
- * special case for KeyValue.equals
- */
- public static boolean equalsIgnoreMvccVersion(Cell a, Cell b){
- return 0 == compareStaticIgnoreMvccVersion(a, b);
- }
-
-}
Index: hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java (working copy)
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hbase.cell;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hbase.Cell;
-
-/**
- * Accepts a stream of Cells and adds them to its internal data structure. This can be used to build
- * a block of cells during compactions and flushes, or to build a byte[] to send to the client. This
- * could be backed by a List, but more efficient implementations will append results to a
- * byte[] to eliminate overhead, and possibly encode the cells further.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface CellOutputStream {
-
- /**
- * Implementation must copy the entire state of the Cell. If the appended Cell is modified
- * immediately after the append method returns, the modifications must have absolutely no effect
- * on the copy of the Cell that was added to the appender. For example, calling someList.add(cell)
- * is not correct.
- */
- void write(Cell cell);
-
- /**
- * Let the implementation decide what to do. Usually means writing accumulated data into a byte[]
- * that can then be read from the implementation to be sent to disk, put in the block cache, or
- * sent over the network.
- */
- void flush() throws IOException;
-
-}
Index: hbase-common/src/main/java/org/apache/hbase/cell/CellScannerPosition.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/cell/CellScannerPosition.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hbase/cell/CellScannerPosition.java (working copy)
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hbase.cell;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * An indicator of the state of the scanner after an operation such as nextCell() or positionAt(..).
- * For example:
- *
- * - In a DataBlockScanner, the AFTER_LAST position indicates to the parent StoreFileScanner that
- * it should load the next block.
- * - In a StoreFileScanner, the AFTER_LAST position indicates that the file has been exhausted.
- * - In a RegionScanner, the AFTER_LAST position indicates that the scanner should move to the
- * next region.
- *
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public enum CellScannerPosition {
-
- /**
- * getCurrentCell() will NOT return a valid cell. Calling nextCell() will advance to the first
- * cell.
- */
- BEFORE_FIRST,
-
- /**
- * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..),
- * rather it is the nearest cell before the requested cell.
- */
- BEFORE,
-
- /**
- * getCurrentCell() will return a valid cell, and it is exactly the cell that was requested by
- * positionAt(..).
- */
- AT,
-
- /**
- * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..),
- * rather it is the nearest cell after the requested cell.
- */
- AFTER,
-
- /**
- * getCurrentCell() will NOT return a valid cell. Calling nextCell() will have no effect.
- */
- AFTER_LAST
-
-}
Index: hbase-common/src/main/java/org/apache/hbase/cell/CellTool.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/cell/CellTool.java (revision 1448760)
+++ hbase-common/src/main/java/org/apache/hbase/cell/CellTool.java (working copy)
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hbase.cell;
-
-import java.nio.ByteBuffer;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.ByteRange;
-import org.apache.hbase.Cell;
-
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public final class CellTool {
-
- /******************* ByteRange *******************************/
-
- public static ByteRange fillRowRange(Cell cell, ByteRange range) {
- return range.set(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
- }
-
- public static ByteRange fillFamilyRange(Cell cell, ByteRange range) {
- return range.set(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
- }
-
- public static ByteRange fillQualifierRange(Cell cell, ByteRange range) {
- return range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
- cell.getQualifierLength());
- }
-
-
- /***************** get individual arrays for tests ************/
-
- public static byte[] getRowArray(Cell cell){
- byte[] output = new byte[cell.getRowLength()];
- copyRowTo(cell, output, 0);
- return output;
- }
-
- public static byte[] getFamilyArray(Cell cell){
- byte[] output = new byte[cell.getFamilyLength()];
- copyFamilyTo(cell, output, 0);
- return output;
- }
-
- public static byte[] getQualifierArray(Cell cell){
- byte[] output = new byte[cell.getQualifierLength()];
- copyQualifierTo(cell, output, 0);
- return output;
- }
-
- public static byte[] getValueArray(Cell cell){
- byte[] output = new byte[cell.getValueLength()];
- copyValueTo(cell, output, 0);
- return output;
- }
-
-
- /******************** copyTo **********************************/
-
- public static int copyRowTo(Cell cell, byte[] destination, int destinationOffset) {
- System.arraycopy(cell.getRowArray(), cell.getRowOffset(), destination, destinationOffset,
- cell.getRowLength());
- return destinationOffset + cell.getRowLength();
- }
-
- public static int copyFamilyTo(Cell cell, byte[] destination, int destinationOffset) {
- System.arraycopy(cell.getFamilyArray(), cell.getFamilyOffset(), destination, destinationOffset,
- cell.getFamilyLength());
- return destinationOffset + cell.getFamilyLength();
- }
-
- public static int copyQualifierTo(Cell cell, byte[] destination, int destinationOffset) {
- System.arraycopy(cell.getQualifierArray(), cell.getQualifierOffset(), destination,
- destinationOffset, cell.getQualifierLength());
- return destinationOffset + cell.getQualifierLength();
- }
-
- public static int copyValueTo(Cell cell, byte[] destination, int destinationOffset) {
- System.arraycopy(cell.getValueArray(), cell.getValueOffset(), destination, destinationOffset,
- cell.getValueLength());
- return destinationOffset + cell.getValueLength();
- }
-
-
- /********************* misc *************************************/
-
- public static byte getRowByte(Cell cell, int index) {
- return cell.getRowArray()[cell.getRowOffset() + index];
- }
-
-
- /********************** KeyValue (move to KeyValueUtils) *********************/
-
- public static ByteBuffer getValueBufferShallowCopy(Cell cell) {
- ByteBuffer buffer = ByteBuffer.wrap(cell.getValueArray(), cell.getValueOffset(),
- cell.getValueLength());
-// buffer.position(buffer.limit());//make it look as if value was appended
- return buffer;
- }
-
-}
Index: hbase-common/src/main/java/org/apache/hbase/codec/BaseDecoder.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/BaseDecoder.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/BaseDecoder.java (working copy)
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.hbase.Cell;
+
+abstract class BaseDecoder implements Codec.Decoder {
+ final InputStream in;
+ private boolean hasNext = true;
+ private Cell current = null;
+
+ BaseDecoder(final InputStream in) {
+ this.in = in;
+ }
+
+ @Override
+ public boolean next() {
+ if (!this.hasNext) return this.hasNext;
+ try {
+ if (this.in.available() <= 0) {
+ this.hasNext = false;
+ return this.hasNext;
+ }
+ this.current = parseCell();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ return this.hasNext;
+ }
+
+ abstract Cell parseCell() throws IOException;
+
+ @Override
+ public Cell get() {
+ return this.current;
+ }
+
+ @Override
+ public Cell getDeepCopy() {
+ return get();
+ }
+}
Index: hbase-common/src/main/java/org/apache/hbase/codec/BaseEncoder.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/BaseEncoder.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/BaseEncoder.java (working copy)
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.hbase.Cell;
+
+abstract class BaseEncoder implements Codec.Encoder {
+ protected final OutputStream out;
+ // This encoder is 'done' once flush has been called.
+ protected boolean flushed = false;
+
+ public BaseEncoder(final OutputStream out) {
+ this.out = out;
+ }
+
+ @Override
+ public abstract void write(Cell cell) throws IOException;
+
+ void checkFlushed() throws CodecException {
+ if (this.flushed) throw new CodecException("Flushed; done");
+ }
+
+ @Override
+ public void flush() throws IOException {
+ if (this.flushed) return;
+ this.flushed = true;
+ try {
+ this.out.flush();
+ } catch (IOException e) {
+ throw new CodecException(e);
+ }
+ }
+}
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hbase/codec/CellCodec.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/CellCodec.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/CellCodec.java (working copy)
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hbase.Cell;
+import org.apache.hbase.CellTool;
+
+/**
+ * Basic Cell codec that just writes out all the individual elements of a Cell. Uses ints
+ * delimiting all lengths. Profligate. Does not write the mvcc stamp. Use a different codec if
+ * you want that in the stream.
+ */
+public class CellCodec implements Codec {
+ class CellEncoder extends BaseEncoder {
+ CellEncoder(final OutputStream out) {
+ super(out);
+ }
+
+ @Override
+ public void write(Cell cell) throws IOException {
+ checkFlushed();
+ try {
+ // Row
+ write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+ // Column family
+ write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
+ // Qualifier
+ write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+ // Version
+ this.out.write(Bytes.toBytes(cell.getTimestamp()));
+ // Type
+ this.out.write(cell.getTypeByte());
+ // Value
+ write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
+ } catch (IOException e) {
+ throw new CodecException(e);
+ }
+ }
+
+ /**
+ * Write int length followed by array bytes.
+ * @param bytes
+ * @param offset
+ * @param length
+ * @throws IOException
+ */
+ private void write(final byte [] bytes, final int offset, final int length)
+ throws IOException {
+ this.out.write(Bytes.toBytes(length));
+ this.out.write(bytes, offset, length);
+ }
+ }
+
+ class CellDecoder extends BaseDecoder {
+ public CellDecoder(final InputStream in) {
+ super(in);
+ }
+
+ Cell parseCell() throws IOException {
+ byte [] row = readByteArray(this.in);
+ byte [] family = readByteArray(in);
+ byte [] qualifier = readByteArray(in);
+ byte [] longArray = new byte[Bytes.SIZEOF_LONG];
+ IOUtils.readFully(this.in, longArray);
+ long timestamp = Bytes.toLong(longArray);
+ byte type = (byte) this.in.read();
+ byte [] value = readByteArray(in);
+ return CellTool.createCell(row, family, qualifier, timestamp, type, value);
+ }
+
+ /**
+ * @return Byte array read from the stream.
+ * @throws IOException
+ */
+ private byte [] readByteArray(final InputStream in) throws IOException {
+ byte [] intArray = new byte[Bytes.SIZEOF_INT];
+ IOUtils.readFully(in, intArray); // guard against short reads of the int length prefix
+ int length = Bytes.toInt(intArray);
+ byte [] bytes = new byte [length];
+ IOUtils.readFully(in, bytes);
+ return bytes;
+ }
+ }
+
+ @Override
+ public Decoder getDecoder(InputStream is) {
+ return new CellDecoder(is);
+ }
+
+ @Override
+ public Encoder getEncoder(OutputStream os) {
+ return new CellEncoder(os);
+ }
+}
\ No newline at end of file
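A round-trip sketch of the codec: everything except the mvcc stamp survives, and the decoder is consumed as a plain CellScanner:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.CellCodec;
import org.apache.hbase.codec.Codec;

public class CellCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    Codec codec = new CellCodec();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Codec.Encoder encoder = codec.getEncoder(baos);
    encoder.write(cell);
    encoder.flush(); // the encoder is done once flushed
    Codec.Decoder decoder =
        codec.getDecoder(new ByteArrayInputStream(baos.toByteArray()));
    while (decoder.next()) {
      // The mvcc stamp is not in the stream, so compare ignoring it.
      System.out.println(CellComparator.equalsIgnoreMvccVersion(cell, decoder.get())); // true
    }
  }
}
```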
Index: hbase-common/src/main/java/org/apache/hbase/codec/Codec.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/Codec.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/Codec.java (working copy)
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+import org.apache.hbase.CellScanner;
+import org.apache.hbase.io.CellOutputStream;
+
+/**
+ * Encoder/Decoder for Cell.
+ *
+ * Like {@link DataBlockEncoder} only Cell-based rather than {@link org.apache.hadoop.hbase.KeyValue} version 1 based
+ * and without presuming an hfile context. Intent is an Interface that will work for hfile and
+ * rpc. TODO: interfacing with DBE.
+ */
+public interface Codec {
+ /**
+ * Call flush when done. Some encoders may not put anything on the stream until flush is called.
+ * On flush, let go of any resources used by the encoder.
+ */
+ public interface Encoder extends CellOutputStream {}
+
+ /**
+ * Implementations should implicitly clean up any resources allocated when the Decoder/CellScanner
+ * runs off the end of the cell block. Do this rather than require the user call close explicitly.
+ */
+ public interface Decoder extends CellScanner {}
+
+ Decoder getDecoder(InputStream is);
+ Encoder getEncoder(OutputStream os);
+}
\ No newline at end of file
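Because Decoder is a CellScanner and Encoder is a CellOutputStream, any two Codec implementations compose into a transcoding loop; a sketch under that assumption:

```java
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hbase.codec.Codec;

public final class Transcode {
  /** Re-encode a cell block read with one codec into the format of another. */
  public static void transcode(final Codec src, final InputStream in,
      final Codec dst, final OutputStream out) throws IOException {
    Codec.Decoder decoder = src.getDecoder(in);
    Codec.Encoder encoder = dst.getEncoder(out);
    while (decoder.next()) {
      encoder.write(decoder.get());
    }
    encoder.flush(); // some encoders hold everything back until flush
  }
}
```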
Index: hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/CodecException.java (working copy)
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.IOException;
+
+public class CodecException extends IOException {
+ private static final long serialVersionUID = -2850095011686914405L;
+
+ public CodecException() {
+ }
+
+ public CodecException(String message) {
+ super(message);
+ }
+
+ public CodecException(Throwable t) {
+ super(t);
+ }
+
+ public CodecException(String message, Throwable t) {
+ super(message, t);
+ }
+}
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hbase/codec/KeyValueCodec.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/codec/KeyValueCodec.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/codec/KeyValueCodec.java (working copy)
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hbase.Cell;
+
+/**
+ * Codec that does KeyValue version 1 serialization.
+ *
+ * Encodes by casting Cell to KeyValue and writing out the backing array with a length prefix.
+ * This is how KVs were serialized in Puts, Deletes and Results pre-0.96. It's what would
+ * happen if you called the Writable#write KeyValue implementation. This encoder will fail
+ * if the passed Cell is not an old-school pre-0.96 KeyValue. Does not copy bytes when
+ * writing; it writes them directly to the passed stream.
+ *
+ * If you wrote two KeyValues to this encoder, it would look like this in the stream:
+ * <pre>
+ * length-of-KeyValue1 // A java int with the length of KeyValue1 backing array
+ * KeyValue1 backing array filled with a KeyValue serialized in its particular format
+ * length-of-KeyValue2
+ * KeyValue2 backing array
+ * </pre>
+ */
+public class KeyValueCodec implements Codec {
+ class KeyValueEncoder extends BaseEncoder {
+ KeyValueEncoder(final OutputStream out) {
+ super(out);
+ }
+
+ @Override
+ public void write(Cell cell) throws IOException {
+ checkFlushed();
+ // This is crass and will not work when KV changes
+ try {
+ KeyValue.lowLevelWrite((KeyValue)cell, this.out);
+ } catch (IOException e) {
+ throw new CodecException(e);
+ }
+ }
+ }
+
+ class KeyValueDecoder extends BaseDecoder {
+ KeyValueDecoder(final InputStream in) {
+ super(in);
+ }
+
+ Cell parseCell() throws IOException {
+ return KeyValue.lowLevelCreate(in);
+ }
+ }
+
+ /**
+ * Implementation depends on {@link InputStream#available()}
+ */
+ @Override
+ public Decoder getDecoder(final InputStream is) {
+ return new KeyValueDecoder(is);
+ }
+
+ @Override
+ public Encoder getEncoder(OutputStream os) {
+ return new KeyValueEncoder(os);
+ }
+}
\ No newline at end of file
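To make the layout above concrete, a sketch that writes one KeyValue, inspects the length prefix, then decodes it back:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.codec.Codec;
import org.apache.hbase.codec.KeyValueCodec;

public class KeyValueCodecExample {
  public static void main(String[] args) throws Exception {
    KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    Codec codec = new KeyValueCodec();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Codec.Encoder encoder = codec.getEncoder(baos);
    encoder.write(kv);
    encoder.flush();
    byte[] block = baos.toByteArray();
    // First four bytes are the java int length of the KeyValue backing array.
    System.out.println(Bytes.toInt(block, 0) == kv.getLength()); // true
    // Decoding relies on InputStream#available() to detect the end of the block.
    Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(block));
    while (decoder.next()) {
      System.out.println(kv.equals(decoder.get())); // true
    }
  }
}
```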
Index: hbase-common/src/main/java/org/apache/hbase/io/CellOutputStream.java
===================================================================
--- hbase-common/src/main/java/org/apache/hbase/io/CellOutputStream.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hbase/io/CellOutputStream.java (working copy)
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hbase.Cell;
+import org.apache.hbase.CellScanner;
+
+/**
+ * Accepts a stream of Cells. This can be used to build a block of cells during compactions
+ * and flushes, or to build a byte[] to send to the client. This could be backed by a
+ * List&lt;Cell&gt;, but more efficient implementations will append results to a
+ * byte[] to eliminate overhead, and possibly encode the cells further.
+ * To read Cells, use {@link CellScanner}
+ * @see CellScanner
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface CellOutputStream {
+ /**
+ * Implementation must copy the entire state of the Cell. If the written Cell is modified
+ * immediately after the write method returns, the modifications must have absolutely no effect
+ * on the copy of the Cell that was added in the write.
+ * @param cell Cell to write out
+ * @throws IOException
+ */
+ void write(Cell cell) throws IOException;
+
+ /**
+ * Let the implementation decide what to do. Usually means writing accumulated data into a byte[]
+ * that can then be read from the implementation to be sent to disk, put in the block cache, or
+ * sent over the network.
+ * @throws IOException
+ */
+ void flush() throws IOException;
+}
\ No newline at end of file
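
To make the deep-copy contract of write(Cell) concrete, a toy implementation backed by a List might look like the sketch below. It is hypothetical and not part of the patch; it uses KeyValueTool.copyToNewKeyValue, which appears elsewhere in this change, to satisfy the copy requirement.

package org.apache.hbase.io;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValueTool;
import org.apache.hbase.Cell;

/** Hypothetical example implementation; not part of this patch. */
public class ListBackedCellOutputStream implements CellOutputStream {
  private final List<Cell> cells = new ArrayList<Cell>();

  @Override
  public void write(Cell cell) throws IOException {
    // Deep-copy so that mutating the caller's Cell after write() returns
    // has no effect on what we buffered.
    cells.add(KeyValueTool.copyToNewKeyValue(cell));
  }

  @Override
  public void flush() throws IOException {
    // A real implementation would encode the accumulated cells into a
    // byte[] here, e.g. for a block cache entry or an RPC response.
  }
}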
Index: hbase-common/src/test/java/org/apache/hbase/TestCellTool.java
===================================================================
--- hbase-common/src/test/java/org/apache/hbase/TestCellTool.java (revision 0)
+++ hbase-common/src/test/java/org/apache/hbase/TestCellTool.java (working copy)
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+
+public class TestCellTool {
+
+ @Test
+ public void testCreateCellScanner() {
+ final int count = 3;
+ final NavigableMap<byte [], List<Cell>> map =
+ new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
+ for (int i = 0; i < count; i++) {
+ byte [] key = Bytes.toBytes(i);
+ Cell [] cs = getCells(count, key);
+ map.put(key, Arrays.asList(cs));
+ }
+ CellScanner scanner = CellTool.createCellScanner(map);
+ int i = 0;
+ while (scanner.next()) {
+ i++;
+ }
+ assertEquals(count * count, i);
+ }
+
+ static Cell [] getCells(final int howMany, final byte [] family) {
+ Cell [] cells = new Cell[howMany];
+ for (int i = 0; i < howMany; i++) {
+ byte [] index = Bytes.toBytes(i);
+ KeyValue kv = new KeyValue(index, family, index, index);
+ cells[i] = kv;
+ }
+ return cells;
+ }
+}
\ No newline at end of file
Index: hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java
===================================================================
--- hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java (revision 0)
+++ hbase-common/src/test/java/org/apache/hbase/codec/TestCellCodec.java (working copy)
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hbase.Cell;
+import org.apache.hbase.CellComparator;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.google.common.io.CountingInputStream;
+import com.google.common.io.CountingOutputStream;
+
+@Category(SmallTests.class)
+public class TestCellCodec {
+
+ @Test
+ public void testEmptyWorks() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ Codec codec = new CellCodec();
+ Codec.Encoder encoder = codec.getEncoder(dos);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ assertEquals(0, offset);
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = codec.getDecoder(dis);
+ assertFalse(decoder.next());
+ dis.close();
+ assertEquals(0, cis.getCount());
+ }
+
+ @Test
+ public void testOne() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ Codec codec = new CellCodec();
+ Codec.Encoder encoder = codec.getEncoder(dos);
+ final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
+ encoder.write(kv);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = codec.getDecoder(dis);
+ assertTrue(decoder.next()); // First read should pull in the KV
+ assertFalse(decoder.next()); // Second read should trip over the end-of-stream marker and return false
+ dis.close();
+ assertEquals(offset, cis.getCount());
+ }
+
+ @Test
+ public void testThree() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ Codec codec = new CellCodec();
+ Codec.Encoder encoder = codec.getEncoder(dos);
+ final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
+ final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
+ final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
+ encoder.write(kv1);
+ encoder.write(kv2);
+ encoder.write(kv3);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = codec.getDecoder(dis);
+ assertTrue(decoder.next());
+ Cell c = decoder.get();
+ assertTrue(CellComparator.equals(c, kv1));
+ assertTrue(decoder.next());
+ c = decoder.get();
+ assertTrue(CellComparator.equals(c, kv2));
+ assertTrue(decoder.next());
+ c = decoder.get();
+ assertTrue(CellComparator.equals(c, kv3));
+ assertFalse(decoder.next());
+ dis.close();
+ assertEquals(offset, cis.getCount());
+ }
+}
\ No newline at end of file
Index: hbase-common/src/test/java/org/apache/hbase/codec/TestKeyValueCodec.java
===================================================================
--- hbase-common/src/test/java/org/apache/hbase/codec/TestKeyValueCodec.java (revision 0)
+++ hbase-common/src/test/java/org/apache/hbase/codec/TestKeyValueCodec.java (working copy)
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hbase.codec;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.google.common.io.CountingInputStream;
+import com.google.common.io.CountingOutputStream;
+
+@Category(SmallTests.class)
+public class TestKeyValueCodec {
+ @Test
+ public void testEmptyWorks() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ KeyValueCodec kvc = new KeyValueCodec();
+ Codec.Encoder encoder = kvc.getEncoder(dos);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ assertEquals(0, offset);
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = kvc.getDecoder(dis);
+ assertFalse(decoder.next());
+ dis.close();
+ assertEquals(0, cis.getCount());
+ }
+
+ @Test
+ public void testOne() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ KeyValueCodec kvc = new KeyValueCodec();
+ Codec.Encoder encoder = kvc.getEncoder(dos);
+ final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
+ final long length = kv.getLength() + Bytes.SIZEOF_INT;
+ encoder.write(kv);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ assertEquals(length, offset);
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = kvc.getDecoder(dis);
+ assertTrue(decoder.next()); // First read should pull in the KV
+ assertFalse(decoder.next()); // Second read should trip over the end-of-stream marker and return false
+ dis.close();
+ assertEquals(length, cis.getCount());
+ }
+
+ @Test
+ public void testThree() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CountingOutputStream cos = new CountingOutputStream(baos);
+ DataOutputStream dos = new DataOutputStream(cos);
+ KeyValueCodec kvc = new KeyValueCodec();
+ Codec.Encoder encoder = kvc.getEncoder(dos);
+ final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), Bytes.toBytes("1"));
+ final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
+ final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
+ final long length = kv1.getLength() + Bytes.SIZEOF_INT;
+ encoder.write(kv1);
+ encoder.write(kv2);
+ encoder.write(kv3);
+ encoder.flush();
+ dos.close();
+ long offset = cos.getCount();
+ assertEquals(length * 3, offset);
+ CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
+ DataInputStream dis = new DataInputStream(cis);
+ Codec.Decoder decoder = kvc.getDecoder(dis);
+ assertTrue(decoder.next());
+ KeyValue kv = (KeyValue)decoder.get();
+ assertTrue(kv1.equals(kv));
+ assertTrue(decoder.next());
+ kv = (KeyValue)decoder.get();
+ assertTrue(kv2.equals(kv));
+ assertTrue(decoder.next());
+ kv = (KeyValue)decoder.get();
+ assertTrue(kv3.equals(kv));
+ assertFalse(decoder.next());
+ dis.close();
+ assertEquals((length * 3), cis.getCount());
+ }
+}
\ No newline at end of file
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java (working copy)
@@ -133,7 +133,7 @@
try {
searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvccVersion);
while (searcher.next()) {
- KeyValue currentCell = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+ KeyValue currentCell = KeyValueTool.copyToNewKeyValue(searcher.get());
// needs to be modified for DirectByteBuffers. no existing methods to
// write VLongs to byte[]
int offset = result.arrayOffset() + result.position();
@@ -163,7 +163,7 @@
if (!searcher.positionAtFirstCell()) {
return null;
}
- return KeyValueTool.copyKeyToNewByteBuffer(searcher.getCurrent());
+ return KeyValueTool.copyKeyToNewByteBuffer(searcher.get());
} finally {
DecoderFactory.checkIn(searcher);
}
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java (working copy)
@@ -25,10 +25,10 @@
import org.apache.hadoop.hbase.KeyValueTool;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellScannerPosition;
-import org.apache.hbase.cell.CellTool;
+import org.apache.hbase.CellTool;
import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
/**
* These methods have the same definition as any implementation of the EncodedSeeker.
@@ -69,13 +69,13 @@
@Override
public ByteBuffer getKeyDeepCopy() {
- return KeyValueTool.copyKeyToNewByteBuffer(ptSearcher.getCurrent());
+ return KeyValueTool.copyKeyToNewByteBuffer(ptSearcher.get());
}
@Override
public ByteBuffer getValueShallowCopy() {
- return CellTool.getValueBufferShallowCopy(ptSearcher.getCurrent());
+ return CellTool.getValueBufferShallowCopy(ptSearcher.get());
}
/**
@@ -83,7 +83,7 @@
*/
@Override
public ByteBuffer getKeyValueBuffer() {
- return KeyValueTool.copyToNewByteBuffer(ptSearcher.getCurrent());
+ return KeyValueTool.copyToNewByteBuffer(ptSearcher.get());
}
/**
@@ -91,7 +91,7 @@
*/
@Override
public KeyValue getKeyValue() {
- return KeyValueTool.copyToNewKeyValue(ptSearcher.getCurrent());
+ return KeyValueTool.copyToNewKeyValue(ptSearcher.get());
}
/**
@@ -104,9 +104,8 @@
* The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to use
* this method instead of the getKeyValue() methods above.
*/
-// @Override
- public Cell getCurrent() {
- return ptSearcher.getCurrent();
+ public Cell get() {
+ return ptSearcher.get();
}
@Override
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java (working copy)
@@ -19,14 +19,15 @@
package org.apache.hbase.codec.prefixtree.decode;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueTool;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
+import org.apache.hbase.CellScanner;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.decode.column.ColumnReader;
import org.apache.hbase.codec.prefixtree.decode.row.RowNodeReader;
import org.apache.hbase.codec.prefixtree.decode.timestamp.MvccVersionDecoder;
import org.apache.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
-import org.apache.hbase.codec.prefixtree.scanner.CellScanner;
/**
* Extends PtCell and manipulates its protected fields. Could alternatively contain a PtCell and
@@ -111,7 +112,7 @@
resetToBeforeFirstEntry();
}
- @Override
+ // Does this have to be in the CellScanner Interface? TODO
public void resetToBeforeFirstEntry() {
beforeFirst = true;
afterLast = false;
@@ -142,13 +143,17 @@
/********************** CellScanner **********************/
@Override
- public PrefixTreeCell getCurrent() {
+ public Cell get() {
if(isOutOfBounds()){
return null;
}
- return this;
+ return (Cell)this;
}
+ @Override
+ public Cell getDeepCopy() {
+ return KeyValueTool.copyToNewKeyValue(get());
+ }
/******************* Object methods ************************/
@@ -168,11 +173,11 @@
*/
@Override
public String toString() {
- PrefixTreeCell currentCell = getCurrent();
+ Cell currentCell = get();
if(currentCell==null){
return "null";
}
- return currentCell.getKeyValueString();
+ return ((PrefixTreeCell)currentCell).getKeyValueString();
}
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java (working copy)
@@ -20,9 +20,9 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellScannerPosition;
-import org.apache.hbase.cell.CellTool;
+import org.apache.hbase.CellTool;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import com.google.common.primitives.UnsignedBytes;
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeCell.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeCell.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeCell.java (working copy)
@@ -22,7 +22,7 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTool;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
/**
* As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java (working copy)
@@ -29,14 +29,14 @@
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellOutputStream;
-import org.apache.hbase.cell.CellTool;
+import org.apache.hbase.CellTool;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
import org.apache.hbase.codec.prefixtree.encode.other.CellTypeEncoder;
import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
import org.apache.hbase.codec.prefixtree.encode.row.RowSectionWriter;
import org.apache.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
+import org.apache.hbase.io.CellOutputStream;
import org.apache.hbase.util.byterange.ByteRangeSet;
import org.apache.hbase.util.byterange.impl.ByteRangeHashSet;
import org.apache.hbase.util.byterange.impl.ByteRangeTreeSet;
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScanner.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScanner.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScanner.java (working copy)
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hbase.codec.prefixtree.scanner;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hbase.Cell;
-
-/**
- * Alternate name may be CellInputStream
- *
- * An interface for iterating through a sequence of cells. Similar to Java's Iterator, but without
- * the hasNext() or remove() methods. The hasNext() method is problematic because it may require
- * actually loading the next object, which in turn requires storing the previous object somewhere.
- * The core data block decoder should be as fast as possible, so we push the complexity and
- * performance expense of concurrently tracking multiple cells to layers above the CellScanner.
- *
- * The getCurrentCell() method will return a reference to a Cell implementation. This reference may
- * or may not point to a reusable cell implementation, so users of the CellScanner should not, for
- * example, accumulate a List of Cells. All of the references may point to the same object, which
- * would be the latest state of the underlying Cell. In short, the Cell is mutable.
- *
- * At a minimum, an implementation will need to be able to advance from one cell to the next in a
- * LinkedList fashion. The nextQualifier(), nextFamily(), and nextRow() methods can all be
- * implemented by calling nextCell(), however, if the DataBlockEncoding supports random access into
- * the block then it may provide smarter versions of these methods.
- *
- * Typical usage:
- *
- *
- * while (scanner.nextCell()) {
- * Cell cell = scanner.getCurrentCell();
- * // do something
- * }
- *
- */
-@InterfaceAudience.Private
-public interface CellScanner{
-
- /**
- * Reset any state in the scanner so it appears it was freshly opened.
- */
- void resetToBeforeFirstEntry();
-
- /**
- * @return the current Cell which may be mutable
- */
- Cell getCurrent();
-
- /**
- * Advance the scanner 1 cell.
- * @return true if the next cell is found and getCurrentCell() will return a valid Cell
- */
- boolean next();
-
-}
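
With this file deleted, the interface lives on as org.apache.hbase.CellScanner and the getCurrent() accessor becomes get(), per the renames throughout the rest of this patch. The typical-usage loop from the removed javadoc carries over in the following assumed form (a sketch, not code from the patch):

// Assumed iteration idiom for the relocated org.apache.hbase.CellScanner;
// method names follow the renames in this patch (getCurrent() -> get()).
void drainScanner(org.apache.hbase.CellScanner scanner) {
  while (scanner.next()) {
    org.apache.hbase.Cell cell = scanner.get();
    // The returned reference may point at a reusable, mutable Cell; copy it
    // (e.g. KeyValueTool.copyToNewKeyValue(cell)) before accumulating.
  }
}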
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScannerPosition.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScannerPosition.java (revision 0)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScannerPosition.java (working copy)
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree.scanner;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * An indicator of the state of the scanner after an operation such as nextCell() or positionAt(..).
+ * For example:
+ *
+ * - In a DataBlockScanner, the AFTER_LAST position indicates to the parent StoreFileScanner that
+ * it should load the next block.
+ * - In a StoreFileScanner, the AFTER_LAST position indicates that the file has been exhausted.
+ * - In a RegionScanner, the AFTER_LAST position indicates that the scanner should move to the
+ * next region.
+ *
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public enum CellScannerPosition {
+
+ /**
+ * getCurrentCell() will NOT return a valid cell. Calling nextCell() will advance to the first
+ * cell.
+ */
+ BEFORE_FIRST,
+
+ /**
+ * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..),
+ * rather it is the nearest cell before the requested cell.
+ */
+ BEFORE,
+
+ /**
+ * getCurrentCell() will return a valid cell, and it is exactly the cell that was requested by
+ * positionAt(..).
+ */
+ AT,
+
+ /**
+ * getCurrentCell() will return a valid cell, but it is not the cell requested by positionAt(..),
+ * rather it is the nearest cell after the requested cell.
+ */
+ AFTER,
+
+ /**
+ * getCurrentCell() will NOT return a valid cell. Calling nextCell() will have no effect.
+ */
+ AFTER_LAST
+
+}
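
To illustrate how a caller might branch on these states, here is a hedged sketch against the CellSearcher API touched below. It is an assumption drawn from the enum javadoc above, not code from the patch:

import org.apache.hbase.Cell;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;

// Hypothetical caller; not part of the patch.
Cell seekToOrAfter(CellSearcher searcher, Cell key) {
  CellScannerPosition p = searcher.positionAtOrAfter(key);
  if (p == CellScannerPosition.AT || p == CellScannerPosition.AFTER) {
    return searcher.get();  // valid cell at, or just after, the requested key
  }
  // AFTER_LAST: the block is exhausted and get() would return null;
  // a StoreFileScanner would load the next block at this point.
  return null;
}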
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellSearcher.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellSearcher.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellSearcher.java (working copy)
@@ -20,7 +20,6 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellScannerPosition;
/**
* Methods for seeking to a random {@link Cell} inside a sorted collection of cells. Indicates that
@@ -28,6 +27,10 @@
*/
@InterfaceAudience.Private
public interface CellSearcher extends ReversibleCellScanner {
+ /**
+ * Reset any state in the scanner so it appears it was freshly opened.
+ */
+ void resetToBeforeFirstEntry();
/**
* Do everything within this scanner's power to find the key. Look forward and backwards.
@@ -62,7 +65,7 @@
CellScannerPosition positionAtOrAfter(Cell key);
/**
- * Note: Added for backwards compatibility with
+ * Note: Added for backwards compatibility with
* {@link org.apache.hadoop.hbase.regionserver.KeyValueScanner#reseek}
*
* Look for the key, but only look after the current position. Probably not needed for an
Index: hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/ReversibleCellScanner.java
===================================================================
--- hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/ReversibleCellScanner.java (revision 1448760)
+++ hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/ReversibleCellScanner.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hbase.codec.prefixtree.scanner;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hbase.CellScanner;
/**
* An extension of CellScanner indicating the scanner supports iterating backwards through cells.
@@ -35,7 +36,7 @@
* @return true if the operation was successful, meaning getCurrentCell() will return a valid
* Cell.
* false if there were no previous cells, meaning getCurrentCell() will return null.
- * Scanner position will be {@link org.apache.hbase.cell.CellScannerPosition#BEFORE_FIRST}
+ * Scanner position will be {@link org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
*/
boolean previous();
@@ -45,7 +46,7 @@
* @return true if the operation was successful, meaning getCurrentCell() will return a valid
* Cell.
* false if there were no previous cells, meaning getCurrentCell() will return null.
- * Scanner position will be {@link org.apache.hbase.cell.CellScannerPosition#BEFORE_FIRST}
+ * Scanner position will be {@link org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition#BEFORE_FIRST}
*/
boolean previousRow(boolean endOfRow);
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/BaseTestRowData.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/BaseTestRowData.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/BaseTestRowData.java (working copy)
@@ -21,7 +21,7 @@
import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java (working copy)
@@ -28,10 +28,10 @@
import org.apache.hadoop.hbase.KeyValueTool;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellComparator;
-import org.apache.hbase.cell.CellScannerPosition;
+import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import org.junit.Assert;
import org.junit.Test;
@@ -75,7 +75,7 @@
while (searcher.next()) {
++i;
KeyValue inputCell = rows.getInputs().get(i);
- Cell outputCell = searcher.getCurrent();
+ Cell outputCell = searcher.get();
// check all 3 permutations of equals()
Assert.assertEquals(inputCell, outputCell);
@@ -100,7 +100,7 @@
++i;
int oppositeIndex = rows.getInputs().size() - i - 1;
KeyValue inputKv = rows.getInputs().get(oppositeIndex);
- KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+ KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.get());
Assert.assertEquals(inputKv, outputKv);
}
Assert.assertEquals(rows.getInputs().size(), i + 1);
@@ -118,7 +118,7 @@
for (KeyValue kv : rows.getInputs()) {
boolean hit = searcher.positionAt(kv);
Assert.assertTrue(hit);
- Cell foundKv = searcher.getCurrent();
+ Cell foundKv = searcher.get();
Assert.assertTrue(CellComparator.equals(kv, foundKv));
}
} finally {
@@ -169,7 +169,7 @@
/*
* TODO: why i+1 instead of i?
*/
- Assert.assertEquals(rows.getInputs().get(i+1), searcher.getCurrent());
+ Assert.assertEquals(rows.getInputs().get(i+1), searcher.get());
}
}
} finally {
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestRowEncoder.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestRowEncoder.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestRowEncoder.java (working copy)
@@ -108,7 +108,7 @@
while (searcher.next()) {
++counter;
KeyValue inputKv = rows.getInputs().get(counter);
- KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+ KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.get());
assertKeyAndValueEqual(inputKv, outputKv);
}
// assert same number of cells
@@ -127,7 +127,7 @@
++counter;
int oppositeIndex = rows.getInputs().size() - counter - 1;
KeyValue inputKv = rows.getInputs().get(oppositeIndex);
- KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+ KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.get());
assertKeyAndValueEqual(inputKv, outputKv);
}
Assert.assertEquals(rows.getInputs().size(), counter + 1);
@@ -157,7 +157,7 @@
int oppositeIndex = rows.getInputs().size() - counter - 1;
KeyValue inputKv = rows.getInputs().get(oppositeIndex);
- KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+ KeyValue outputKv = KeyValueTool.copyToNewKeyValue(searcher.get());
assertKeyAndValueEqual(inputKv, outputKv);
}
Assert.assertEquals(rows.getInputs().size(), counter + 1);
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.cell.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import org.junit.Assert;
@@ -75,9 +75,9 @@
KeyValue cfcRow = KeyValue.createFirstOnRow(Bytes.toBytes("cfc"));
CellScannerPosition position = searcher.positionAtOrAfter(cfcRow);
Assert.assertEquals(CellScannerPosition.AFTER, position);
- Assert.assertEquals(d.get(2), searcher.getCurrent());
+ Assert.assertEquals(d.get(2), searcher.get());
searcher.previous();
- Assert.assertEquals(d.get(1), searcher.getCurrent());
+ Assert.assertEquals(d.get(1), searcher.get());
}
}
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java (working copy)
@@ -24,7 +24,7 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
import com.google.common.collect.Lists;
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java (working copy)
@@ -23,9 +23,9 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellComparator;
-import org.apache.hbase.cell.CellScannerPosition;
+import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import org.junit.Assert;
@@ -67,12 +67,12 @@
//test first cell
searcher.next();
- Cell first = searcher.getCurrent();
+ Cell first = searcher.get();
Assert.assertTrue(CellComparator.equals(d.get(0), first));
//test first cell in second row
Assert.assertTrue(searcher.positionAt(d.get(1)));
- Assert.assertTrue(CellComparator.equals(d.get(1), searcher.getCurrent()));
+ Assert.assertTrue(CellComparator.equals(d.get(1), searcher.get()));
testBetween1and2(searcher);
testBetween2and3(searcher);
@@ -94,12 +94,12 @@
//test atOrBefore
p = searcher.positionAtOrBefore(betweenAAndAAA);
Assert.assertEquals(CellScannerPosition.BEFORE, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(1)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(1)));
//test atOrAfter
p = searcher.positionAtOrAfter(betweenAAndAAA);
Assert.assertEquals(CellScannerPosition.AFTER, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(2)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(2)));
}
private void testBetween2and3(CellSearcher searcher){
@@ -112,12 +112,12 @@
//test atOrBefore
p = searcher.positionAtOrBefore(betweenAAAndB);
Assert.assertEquals(CellScannerPosition.BEFORE, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(2)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(2)));
//test atOrAfter
p = searcher.positionAtOrAfter(betweenAAAndB);
Assert.assertEquals(CellScannerPosition.AFTER, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(3)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(3)));
}
}
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSimple.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSimple.java (working copy)
@@ -24,9 +24,9 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hbase.Cell;
-import org.apache.hbase.cell.CellComparator;
-import org.apache.hbase.cell.CellScannerPosition;
+import org.apache.hbase.CellComparator;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import org.junit.Assert;
@@ -67,12 +67,12 @@
// test first cell
searcher.next();
- Cell first = searcher.getCurrent();
+ Cell first = searcher.get();
Assert.assertTrue(CellComparator.equals(d.get(0), first));
// test first cell in second row
Assert.assertTrue(searcher.positionAt(d.get(3)));
- Assert.assertTrue(CellComparator.equals(d.get(3), searcher.getCurrent()));
+ Assert.assertTrue(CellComparator.equals(d.get(3), searcher.get()));
Cell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0);
@@ -82,12 +82,12 @@
// test atOrBefore
p = searcher.positionAtOrBefore(between4And5);
Assert.assertEquals(CellScannerPosition.BEFORE, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(4)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(4)));
// test atOrAfter
p = searcher.positionAtOrAfter(between4And5);
Assert.assertEquals(CellScannerPosition.AFTER, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(5)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(5)));
// test when key falls before first key in block
Cell beforeFirst = new KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0);
@@ -96,8 +96,8 @@
Assert.assertEquals(CellScannerPosition.BEFORE_FIRST, p);
p = searcher.positionAtOrAfter(beforeFirst);
Assert.assertEquals(CellScannerPosition.AFTER, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), d.get(0)));
- Assert.assertEquals(d.get(0), searcher.getCurrent());
+ Assert.assertTrue(CellComparator.equals(searcher.get(), d.get(0)));
+ Assert.assertEquals(d.get(0), searcher.get());
// test when key falls after last key in block
Cell afterLast = new KeyValue(Bytes.toBytes("z"), cf, cq0, ts, v0);// must be lower case z
@@ -106,7 +106,7 @@
Assert.assertEquals(CellScannerPosition.AFTER_LAST, p);
p = searcher.positionAtOrBefore(afterLast);
Assert.assertEquals(CellScannerPosition.BEFORE, p);
- Assert.assertTrue(CellComparator.equals(searcher.getCurrent(), CollectionUtils.getLast(d)));
+ Assert.assertTrue(CellComparator.equals(searcher.get(), CollectionUtils.getLast(d)));
}
}
Index: hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java (revision 1448760)
+++ hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java (working copy)
@@ -22,9 +22,9 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.cell.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
import org.junit.Assert;
@@ -68,6 +68,6 @@
KeyValue afterLast = KeyValue.createFirstOnRow(Bytes.toBytes("zzz"));
CellScannerPosition position = searcher.positionAtOrAfter(afterLast);
Assert.assertEquals(CellScannerPosition.AFTER_LAST, position);
- Assert.assertNull(searcher.getCurrent());
+ Assert.assertNull(searcher.get());
}
}
Index: hbase-protocol/pom.xml
===================================================================
--- hbase-protocol/pom.xml (revision 1448760)
+++ hbase-protocol/pom.xml (working copy)
@@ -53,11 +53,16 @@
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+    </dependency>
@@ -74,4 +79,4 @@
-</project>
\ No newline at end of file
+</project>
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java (revision 1448760)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java (working copy)
@@ -35,7 +35,7 @@
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (revision 1448760)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (working copy)
@@ -91,7 +91,7 @@
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -225,7 +225,7 @@
RegionScanner scanner1 = region.getScanner(scan);
System.out.println("Smallest read point:" + region.getSmallestReadPoint());
-
+
region.compactStores(true);
scanner1.reseek(Bytes.toBytes("r2"));
@@ -254,7 +254,7 @@
for (long i = minSeqId; i <= maxSeqId; i += 10) {
Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
fs.create(recoveredEdits);
- HLog.Writer writer = HLogFactory.createWriter(fs,
+ HLog.Writer writer = HLogFactory.createWriter(fs,
recoveredEdits, conf);
long time = System.nanoTime();
@@ -306,7 +306,7 @@
for (long i = minSeqId; i <= maxSeqId; i += 10) {
Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
fs.create(recoveredEdits);
- HLog.Writer writer = HLogFactory.createWriter(fs,
+ HLog.Writer writer = HLogFactory.createWriter(fs,
recoveredEdits, conf);
long time = System.nanoTime();
@@ -367,7 +367,7 @@
recoveredEditsDir, String.format("%019d", minSeqId-1));
FSDataOutputStream dos= fs.create(recoveredEdits);
dos.close();
-
+
Map<byte [], Long> maxSeqIdInStores = new TreeMap<byte [], Long>(
Bytes.BYTES_COMPARATOR);
for (Store store : region.getStores().values()) {
@@ -525,7 +525,7 @@
this.region = initHRegion(TABLE, getName(), conf, true, Bytes.toBytes("somefamily"));
boolean exceptionCaught = false;
Append append = new Append(Bytes.toBytes("somerow"));
- append.add(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
+ append.add(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
Bytes.toBytes("somevalue"));
try {
region.append(append, false);
@@ -541,7 +541,7 @@
public void testIncrWithReadOnlyTable() throws Exception {
byte[] TABLE = Bytes.toBytes("readOnlyTable");
this.region = initHRegion(TABLE, getName(), conf, true, Bytes.toBytes("somefamily"));
- boolean exceptionCaught = false;
+ boolean exceptionCaught = false;
Increment inc = new Increment(Bytes.toBytes("somerow"));
inc.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"), 1L);
try {
@@ -710,7 +710,7 @@
LOG.info("...starting put thread while holding lock");
ctx.addThread(putter);
ctx.startThreads();
-
+
LOG.info("...waiting for put thread to sync first time");
long startWait = System.currentTimeMillis();
while (metricsAssertHelper.getCounter("syncTimeNumOps", source) == syncs +2 ) {
@@ -730,7 +730,7 @@
assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY :
OperationStatusCode.SUCCESS, codes[i].getOperationStatusCode());
}
-
+
LOG.info("Nexta, a batch put which uses an already-held lock");
lockedRow = region.obtainRowLock(Bytes.toBytes("row_2"));
LOG.info("...obtained row lock");
@@ -740,7 +740,7 @@
if (i == 2) pair.setSecond(lockedRow);
putsAndLocks.add(pair);
}
-
+
codes = region.batchMutate(putsAndLocks.toArray(new Pair[0]));
LOG.info("...performed put");
for (int i = 0; i < 10; i++) {
@@ -749,7 +749,7 @@
}
// Make sure we didn't do an extra batch
metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 5, source);
-
+
// Make sure we still hold lock
assertTrue(region.isRowLocked(lockedRow));
LOG.info("...releasing lock");
@@ -1867,7 +1867,7 @@
/**
* This method tests https://issues.apache.org/jira/browse/HBASE-2516.
- * @throws IOException
+ * @throws IOException
*/
public void testGetScanner_WithRegionClosed() throws IOException {
byte[] tableName = Bytes.toBytes("testtable");
@@ -3434,11 +3434,11 @@
}
}
}
-
+
/**
* Testcase to check state of region initialization task set to ABORTED or not if any exceptions
* during initialization
- *
+ *
* @throws Exception
*/
@Test
@@ -3602,7 +3602,7 @@
Result res = this.region.get(get);
List<KeyValue> kvs = res.getColumn(Incrementer.family,
Incrementer.qualifier);
-
+
//we just got the latest version
assertEquals(kvs.size(), 1);
KeyValue kv = kvs.get(0);
@@ -3696,7 +3696,7 @@
Result res = this.region.get(get);
List<KeyValue> kvs = res.getColumn(Appender.family,
Appender.qualifier);
-
+
//we just got the latest version
assertEquals(kvs.size(), 1);
KeyValue kv = kvs.get(0);
@@ -3765,7 +3765,7 @@
assertEquals(1, kvs.size());
assertEquals(Bytes.toBytes("value1"), kvs.get(0).getValue());
}
-
+
private void putData(int startRow, int numRows, byte [] qf,
byte [] ...families)
throws IOException {
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java (revision 1448760)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java (working copy)
@@ -48,7 +48,7 @@
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.cell.CellComparator;
+import org.apache.hbase.CellComparator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
@@ -122,7 +122,7 @@
@Parameters
public static final Collection<Object[]> parameters() {