diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index b6600a9..c599f06 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -138,19 +138,19 @@ public class Result implements CellScannable, CellScanner {
*
* <br><strong>Note:</strong> You must ensure that the keyvalues are already sorted.
* @param cells List of cells
*/
- public static Result create(List<Cell> cells) {
+ public static Result create(List<? extends Cell> cells) {
return create(cells, null);
}
- public static Result create(List<Cell> cells, Boolean exists) {
+ public static Result create(List<? extends Cell> cells, Boolean exists) {
return create(cells, exists, false);
}
- public static Result create(List<Cell> cells, Boolean exists, boolean stale) {
+ public static Result create(List<? extends Cell> cells, Boolean exists, boolean stale) {
return create(cells, exists, stale, false);
}
- public static Result create(List<Cell> cells, Boolean exists, boolean stale, boolean partial) {
+ public static Result create(List<? extends Cell> cells, Boolean exists, boolean stale, boolean partial) {
if (exists != null){
return new Result(null, exists, stale, partial);
}
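
The widening from List<Cell> to List<? extends Cell> above is caller-facing: under Java generics a List<KeyValue> is not a List<Cell>, but it does match List<? extends Cell>. A minimal caller sketch (row/family/qualifier values invented for illustration):

    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value")));
    Result r = Result.create(kvs); // compiles only with the wildcard signature
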
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index 97724bd..f791b51 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -24,6 +24,7 @@ import java.math.RoundingMode;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg;
@@ -51,6 +52,20 @@ public class BigDecimalColumnInterpreter extends ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg>{
@Override
- public Double getValue(byte[] colFamily, byte[] colQualifier, Cell c)
+ public Double getValue(byte[] colFamily, byte[] colQualifier, Cell c) throws IOException {
+ return null;
+ }
+
+ @Override
+ public Double getValue(byte[] colFamily, byte[] colQualifier, ServerCell c)
throws IOException {
if (c == null || c.getValueLength() != Bytes.SIZEOF_DOUBLE)
return null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index e8e5e3a..70d2938 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client.coprocessor;
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
@@ -45,6 +46,13 @@ public class LongColumnInterpreter extends ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> {
throws IOException;
/**
+ *
+ * @param colFamily
+ * @param colQualifier
+ * @param c
+ * @return value of type T
+ * @throws IOException
+ */
+ public abstract T getValue(byte[] colFamily, byte[] colQualifier, ServerCell c)
+ throws IOException;
+
+ /**
* @param l1
* @param l2
* @return sum or non null value among (if either of them is null); otherwise
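
The new abstract getValue(byte[], byte[], ServerCell) pushes a server-side variant down to every concrete interpreter. A sketch of how LongColumnInterpreter might implement it, assuming it mirrors the existing Cell-based logic the way the Double-typed hunk above does:

    @Override
    public Long getValue(byte[] colFamily, byte[] colQualifier, ServerCell c)
        throws IOException {
      if (c == null || c.getValueLength() != Bytes.SIZEOF_LONG)
        return null;
      // Array-backed path; buffer-backed ServerCells would need the planned
      // getXXXBuffer() APIs instead.
      return Bytes.toLong(c.getValueArray(), c.getValueOffset());
    }
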
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 572de9f..fd3011c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -57,7 +57,7 @@ public class ColumnCountGetFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
this.count++;
return filterAllRemaining() ? ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 673ca6e..6c0ef68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -105,7 +105,7 @@ public class ColumnPaginationFilter extends FilterBase
}
@Override
- public ReturnCode filterKeyValue(Cell v)
+ public ReturnCode filterKeyValue(ServerCell v)
{
if (columnOffset != null) {
if (count >= limit) {
@@ -144,7 +144,7 @@ public class ColumnPaginationFilter extends FilterBase
}
@Override
- public Cell getNextCellHint(Cell cell) {
+ public ServerCell getNextCellHint(ServerCell cell) {
return KeyValueUtil.createFirstOnRow(
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), columnOffset, 0, columnOffset.length);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index d2f058a..d56ded3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,7 +52,7 @@ public class ColumnPrefixFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell kv) {
+ public ReturnCode filterKeyValue(ServerCell kv) {
if (this.prefix == null || kv.getQualifierArray() == null) {
return ReturnCode.INCLUDE;
} else {
@@ -130,7 +130,7 @@ public class ColumnPrefixFilter extends FilterBase {
}
@Override
- public Cell getNextCellHint(Cell cell) {
+ public ServerCell getNextCellHint(ServerCell cell) {
return KeyValueUtil.createFirstOnRow(
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), prefix, 0, prefix.length);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index d8ea094..e0f1fd7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -23,8 +23,8 @@ import static org.apache.hadoop.hbase.util.Bytes.len;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -116,7 +116,7 @@ public class ColumnRangeFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell kv) {
+ public ReturnCode filterKeyValue(ServerCell kv) {
// TODO have a column compare method in Cell
byte[] buffer = kv.getQualifierArray();
int qualifierOffset = kv.getQualifierOffset();
@@ -216,7 +216,7 @@ public class ColumnRangeFilter extends FilterBase {
}
@Override
- public Cell getNextCellHint(Cell cell) {
+ public ServerCell getNextCellHint(ServerCell cell) {
return KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(), cell
.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell
.getFamilyLength(), this.minColumn, 0, len(this.minColumn));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 6d19842..731fa65 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -25,8 +25,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -134,7 +134,7 @@ public class DependentColumnFilter extends CompareFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell c) {
+ public ReturnCode filterKeyValue(ServerCell c) {
// Check if the column and qualifier match
if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
// include non-matches for the time being, they'll be discarded afterwards
@@ -154,12 +154,12 @@ public class DependentColumnFilter extends CompareFilter {
}
@Override
- public void filterRowCells(List<Cell> kvs) {
- Iterator<? extends Cell> it = kvs.iterator();
- Cell kv;
+ public void filterRowCells(List<ServerCell> cells) {
+ Iterator<? extends ServerCell> it = cells.iterator();
+ ServerCell cell;
while(it.hasNext()) {
- kv = it.next();
- if(!stampSet.contains(kv.getTimestamp())) {
+ cell = it.next();
+ if(!stampSet.contains(cell.getTimestamp())) {
it.remove();
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index e79a4d5..1edf6bc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -60,7 +60,7 @@ public class FamilyFilter extends CompareFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
int familyLength = v.getFamilyLength();
if (familyLength > 0) {
if (doCompare(this.compareOp, this.comparator, v.getFamilyArray(),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 88bf842..d26ade1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -113,7 +113,7 @@ public abstract class Filter {
* @throws IOException in case an I/O or an filter specific failure needs to be signaled.
* @see Filter.ReturnCode
*/
- abstract public ReturnCode filterKeyValue(final Cell v) throws IOException;
+ abstract public ReturnCode filterKeyValue(final ServerCell v) throws IOException;
/**
* Give the filter a chance to transform the passed KeyValue. If the Cell is changed a new
@@ -132,7 +132,7 @@ public abstract class Filter {
* @return the changed KeyValue
* @throws IOException in case an I/O or an filter specific failure needs to be signaled.
*/
- abstract public Cell transformCell(final Cell v) throws IOException;
+ abstract public ServerCell transformCell(final ServerCell v) throws IOException;
/**
* Return codes for filterValue().
@@ -176,7 +176,7 @@ public abstract class Filter {
* @param kvs the list of Cells to be filtered
* @throws IOException in case an I/O or an filter specific failure needs to be signaled.
*/
- abstract public void filterRowCells(List<Cell> kvs) throws IOException;
+ abstract public void filterRowCells(List<ServerCell> kvs) throws IOException;
/**
* Primarily used to check for conflicts with scans(such as scans that do not read a full row at a
@@ -211,7 +211,7 @@ public abstract class Filter {
* seek to next.
* @throws IOException in case an I/O or an filter specific failure needs to be signaled.
*/
- abstract public Cell getNextCellHint(final Cell currentCell) throws IOException;
+ abstract public ServerCell getNextCellHint(final ServerCell currentCell) throws IOException;
/**
* Check that given column family is essential for filter to check row. Most filters always return
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index a04dd89..941cfc8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -23,7 +23,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
@@ -75,7 +75,7 @@ public abstract class FilterBase extends Filter {
* @inheritDoc
*/
@Override
- public Cell transformCell(Cell v) throws IOException {
+ public ServerCell transformCell(ServerCell v) throws IOException {
return v;
}
@@ -86,7 +86,7 @@ public abstract class FilterBase extends Filter {
* @inheritDoc
*/
@Override
- public void filterRowCells(List<Cell> ignored) throws IOException {
+ public void filterRowCells(List<ServerCell> ignored) throws IOException {
}
/**
@@ -118,7 +118,7 @@ public abstract class FilterBase extends Filter {
*
* @inheritDoc
*/
- public Cell getNextCellHint(Cell currentCell) throws IOException {
+ public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
return null;
}
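
With Filter and FilterBase retyped to ServerCell, a custom filter overrides only the hooks it needs and inherits ServerCell-typed defaults for the rest. A hypothetical minimal filter against the patched API (class name and logic invented for illustration; serialization methods omitted):

    import java.io.IOException;
    import org.apache.hadoop.hbase.ServerCell;
    import org.apache.hadoop.hbase.filter.FilterBase;

    public class MaxTimestampFilter extends FilterBase {
      private final long maxTs;

      public MaxTimestampFilter(long maxTs) {
        this.maxTs = maxTs;
      }

      @Override
      public ReturnCode filterKeyValue(ServerCell v) throws IOException {
        // Skip cells newer than the cut-off; keep everything else.
        return v.getTimestamp() > maxTs ? ReturnCode.SKIP : ReturnCode.INCLUDE;
      }
    }
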
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index ba1a818..c490922 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -23,9 +23,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -70,7 +70,7 @@ final public class FilterList extends Filter {
private Filter seekHintFilter = null;
/** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
- private Cell referenceCell = null;
+ private ServerCell referenceCell = null;
/**
* When filtering a given Cell in {@link #filterKeyValue(Cell)},
@@ -79,7 +79,7 @@ final public class FilterList extends Filter {
* Individual filters transformation are applied only when the filter includes the Cell.
* Transformations are composed in the order specified by {@link #filters}.
*/
- private Cell transformedCell = null;
+ private ServerCell transformedCell = null;
/**
* Constructor that takes a set of {@link Filter}s. The default operator
@@ -215,7 +215,7 @@ final public class FilterList extends Filter {
}
@Override
- public Cell transformCell(Cell c) throws IOException {
+ public ServerCell transformCell(ServerCell c) throws IOException {
if (!CellComparator.equals(c, referenceCell)) {
throw new IllegalStateException("Reference Cell: " + this.referenceCell + " does not match: "
+ c);
@@ -226,11 +226,11 @@ final public class FilterList extends Filter {
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
justification="Intentional")
- public ReturnCode filterKeyValue(Cell c) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell c) throws IOException {
this.referenceCell = c;
// Accumulates successive transformation of every filter that includes the Cell:
- Cell transformed = c;
+ ServerCell transformed = c;
ReturnCode rc = operator == Operator.MUST_PASS_ONE?
ReturnCode.SKIP: ReturnCode.INCLUDE;
@@ -299,7 +299,7 @@ final public class FilterList extends Filter {
* @inheritDoc
*/
@Override
- public void filterRowCells(List<Cell> cells) throws IOException {
+ public void filterRowCells(List<ServerCell> cells) throws IOException {
int listize = filters.size();
for (int i = 0; i < listize; i++) {
filters.get(i).filterRowCells(cells);
@@ -394,8 +394,8 @@ final public class FilterList extends Filter {
}
@Override
- public Cell getNextCellHint(Cell currentCell) throws IOException {
- Cell keyHint = null;
+ public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
+ ServerCell keyHint = null;
if (operator == Operator.MUST_PASS_ALL) {
keyHint = seekHintFilter.getNextCellHint(currentCell);
return keyHint;
@@ -404,7 +404,7 @@ final public class FilterList extends Filter {
// If any condition can pass, we need to keep the min hint
int listize = filters.size();
for (int i = 0; i < listize; i++) {
- Cell curKeyHint = filters.get(i).getNextCellHint(currentCell);
+ ServerCell curKeyHint = filters.get(i).getNextCellHint(currentCell);
if (curKeyHint == null) {
// If we ever don't have a hint and this is must-pass-one, then no hint
return null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 5176115..5d08e45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -96,7 +96,7 @@ final public class FilterWrapper extends Filter {
}
@Override
- public Cell getNextCellHint(Cell currentCell) throws IOException {
+ public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
return this.filter.getNextCellHint(currentCell);
}
@@ -106,12 +106,12 @@ final public class FilterWrapper extends Filter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell v) throws IOException {
return this.filter.filterKeyValue(v);
}
@Override
- public Cell transformCell(Cell v) throws IOException {
+ public ServerCell transformCell(ServerCell v) throws IOException {
return this.filter.transformCell(v);
}
@@ -121,7 +121,7 @@ final public class FilterWrapper extends Filter {
}
@Override
- public void filterRowCells(List<Cell> kvs) throws IOException {
+ public void filterRowCells(List<ServerCell> kvs) throws IOException {
filterRowCellsWithRet(kvs);
}
@@ -130,7 +130,7 @@ final public class FilterWrapper extends Filter {
INCLUDE, // corresponds to filter.filterRow() returning false
EXCLUDE // corresponds to filter.filterRow() returning true
}
- public FilterRowRetCode filterRowCellsWithRet(List<Cell> kvs) throws IOException {
+ public FilterRowRetCode filterRowCellsWithRet(List<ServerCell> kvs) throws IOException {
//To fix HBASE-6429,
//Filter with filterRow() returning true is incompatible with scan with limit
//1. hasFilterRow() returns true, if either filterRow() or filterRow(kvs) is implemented.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index 77ed7d9..ee3781c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -47,7 +47,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if(foundKV) return ReturnCode.NEXT_ROW;
foundKV = true;
return ReturnCode.INCLUDE;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index 622f5ab..d41ea4a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.filter;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -61,7 +61,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (hasFoundKV()) {
return ReturnCode.NEXT_ROW;
} else if (hasOneMatchingQualifier(v)) {
@@ -70,7 +70,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
return ReturnCode.INCLUDE;
}
- private boolean hasOneMatchingQualifier(Cell v) {
+ private boolean hasOneMatchingQualifier(ServerCell v) {
for (byte[] q : qualifiers) {
if (CellUtil.matchingQualifier(v, q)) {
return true;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 9b99b71..bf6af6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -21,8 +21,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -85,7 +85,7 @@ public class FuzzyRowFilter extends FilterBase {
// TODO: possible improvement: save which fuzzy row key to use when providing a hint
@Override
- public ReturnCode filterKeyValue(Cell c) {
+ public ReturnCode filterKeyValue(ServerCell c) {
// assigning "worst" result first and looking for better options
SatisfiesCode bestOption = SatisfiesCode.NO_NEXT;
for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
@@ -110,7 +110,7 @@ public class FuzzyRowFilter extends FilterBase {
}
@Override
- public Cell getNextCellHint(Cell currentCell) {
+ public ServerCell getNextCellHint(ServerCell currentCell) {
byte[] nextRowKey = null;
// Searching for the "smallest" row key that satisfies at least one fuzzy row key
for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index cf2d153..cd45074 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -53,7 +53,7 @@ public class InclusiveStopFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (done) return ReturnCode.NEXT_ROW;
return ReturnCode.INCLUDE;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 2a2b525..a1183cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -50,11 +50,11 @@ public class KeyOnlyFilter extends FilterBase {
public KeyOnlyFilter(boolean lenAsVal) { this.lenAsVal = lenAsVal; }
@Override
- public Cell transformCell(Cell cell) {
+ public ServerCell transformCell(ServerCell cell) {
return createKeyOnlyCell(cell);
}
- private Cell createKeyOnlyCell(Cell c) {
+ private ServerCell createKeyOnlyCell(ServerCell c) {
// KV format: <keylen:4><valuelen:4><key:keylen><value:valuelen>
// Rebuild as: <keylen:4><0:4><key:keylen>
int dataLen = lenAsVal ? Bytes.SIZEOF_INT : 0;
@@ -71,7 +71,7 @@ public class KeyOnlyFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
return ReturnCode.INCLUDE;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index bd880a0..cce01c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -24,9 +24,9 @@ import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -124,12 +124,12 @@ public class MultiRowRangeFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell ignored) {
+ public ReturnCode filterKeyValue(ServerCell ignored) {
return currentReturnCode;
}
@Override
- public Cell getNextCellHint(Cell currentKV) {
+ public ServerCell getNextCellHint(ServerCell currentCell) {
// skip to the next range's start row
return KeyValueUtil.createFirstOnRow(range.startRow);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index b7ec11a..6888a2e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -22,8 +22,8 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.TreeSet;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -64,7 +64,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell kv) {
+ public ReturnCode filterKeyValue(ServerCell kv) {
if (sortedPrefixes.size() == 0 || kv.getQualifierArray() == null) {
return ReturnCode.INCLUDE;
} else {
@@ -155,7 +155,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
}
@Override
- public Cell getNextCellHint(Cell cell) {
+ public ServerCell getNextCellHint(ServerCell cell) {
return KeyValueUtil.createFirstOnRow(
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), hint, 0, hint.length);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index 0dbd97b..a80277d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -61,7 +61,7 @@ public class PageFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
return ReturnCode.INCLUDE;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 5b56748..bd32a68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -68,7 +68,7 @@ public class PrefixFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (filterRow) return ReturnCode.NEXT_ROW;
return ReturnCode.INCLUDE;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index fb183f1..6de0dc8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -60,7 +60,7 @@ public class QualifierFilter extends CompareFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
int qualifierLength = v.getQualifierLength();
if (qualifierLength > 0) {
if (doCompare(this.compareOp, this.comparator, v.getQualifierArray(),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 2a25b32..d27acd8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.Random;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -72,7 +72,7 @@ public class RandomRowFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (filterOutRow) {
return ReturnCode.NEXT_ROW;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index cb4337e..c5a82e5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -66,7 +66,7 @@ public class RowFilter extends CompareFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if(this.filterOutRow) {
return ReturnCode.NEXT_ROW;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index d030fd2..fd0b7cd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -103,8 +103,8 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
// Here we remove from row all key values from testing column
@Override
- public void filterRowCells(List<Cell> kvs) {
- Iterator<? extends Cell> it = kvs.iterator();
+ public void filterRowCells(List<ServerCell> kvs) {
+ Iterator<? extends ServerCell> it = kvs.iterator();
while (it.hasNext()) {
// If the current column is actually the tested column,
// we will skip it instead.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index d905868..8aaadff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -169,7 +169,7 @@ public class SingleColumnValueFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell c) {
+ public ReturnCode filterKeyValue(ServerCell c) {
// System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
if (this.matchedColumn) {
// We already found and matched the single column, all keys now pass
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index ce8e511..9aeef68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -74,14 +74,14 @@ public class SkipFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell v) throws IOException {
ReturnCode c = filter.filterKeyValue(v);
changeFR(c != ReturnCode.INCLUDE);
return c;
}
@Override
- public Cell transformCell(Cell v) throws IOException {
+ public ServerCell transformCell(ServerCell v) throws IOException {
return filter.transformCell(v);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index 32a3d73..cce2238 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -89,7 +89,7 @@ public class TimestampsFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (this.timestamps.contains(v.getTimestamp())) {
return ReturnCode.INCLUDE;
} else if (v.getTimestamp() < minTimeStamp) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index a2c5eb2..24a514d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -59,7 +59,7 @@ public class ValueFilter extends CompareFilter {
}
@Override
- public ReturnCode filterKeyValue(Cell v) {
+ public ReturnCode filterKeyValue(ServerCell v) {
if (doCompare(this.compareOp, this.comparator, v.getValueArray(),
v.getValueOffset(), v.getValueLength())) {
return ReturnCode.SKIP;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 31d4f77..33b8773 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -73,14 +73,14 @@ public class WhileMatchFilter extends FilterBase {
}
@Override
- public ReturnCode filterKeyValue(Cell v) throws IOException {
+ public ReturnCode filterKeyValue(ServerCell v) throws IOException {
ReturnCode c = filter.filterKeyValue(v);
changeFAR(c != ReturnCode.INCLUDE);
return c;
}
@Override
- public Cell transformCell(Cell v) throws IOException {
+ public ServerCell transformCell(ServerCell v) throws IOException {
return filter.transformCell(v);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 8b5b2d7..c3c02ed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
@@ -2513,6 +2514,14 @@ public final class ProtobufUtil {
kv.getValueLength()));
return kvbuilder.build();
}
+
+ public static CellProtos.Cell toCell(final ServerCell cell) {
+ if(cell.hasArray()){
+ return toCell((Cell)cell);
+ }
+ // TODO use getXXXBuffer APIs and make CellProtos.Cell object
+ return null;
+ }
public static Cell toCell(final CellProtos.Cell cell) {
// Doing this is going to kill us if we do it for all data passed.
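
The new toCell(ServerCell) overload only covers array-backed cells and leaves buffer-backed ones as a TODO, making hasArray() the dispatch point for mixed on-heap/off-heap code. A sketch of that pattern (method name hypothetical):

    static void writeValue(ServerCell cell, OutputStream out) throws IOException {
      if (cell.hasArray()) {
        // On-heap cell such as KeyValue: copy straight from the backing array.
        out.write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
      } else {
        // Off-heap cell: would go through the planned getXXXBuffer() APIs.
        throw new UnsupportedOperationException("buffer-backed cells not yet handled");
      }
    }
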
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
index 540c967..c541ac8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
@@ -48,6 +48,7 @@ public class CellComparator implements Comparator<Cell>, Serializable {
return compare(a, b, false);
}
+ // TODO we will have to add ServerCell versions of compare too.
/**
* Compare cells.
* TODO: Replace with dynamic rather than static comparator so can change comparator
@@ -389,8 +390,8 @@ public class CellComparator implements Comparator, Serializable {
* @param right
* @return A cell that sorts between left and right.
*/
- public static Cell getMidpoint(final KeyValue.KVComparator comparator, final Cell left,
- final Cell right) {
+ public static ServerCell getMidpoint(final KeyValue.KVComparator comparator, final ServerCell left,
+ final ServerCell right) {
// TODO: Redo so only a single pass over the arrays rather than one to compare and then a
// second composing midpoint.
if (right == null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index bce3957..ec104c3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
@@ -166,7 +167,7 @@ public final class CellUtil {
return buffer;
}
- public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
+ public static ServerCell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
final long timestamp, final byte type, final byte [] value) {
// I need a Cell Factory here. Using KeyValue for now. TODO.
// TODO: Make a new Cell implementation that just carries these
@@ -175,7 +176,7 @@ public final class CellUtil {
return new KeyValue(row, family, qualifier, timestamp, KeyValue.Type.codeToType(type), value);
}
- public static Cell createCell(final byte [] rowArray, final int rowOffset, final int rowLength,
+ public static ServerCell createCell(final byte [] rowArray, final int rowOffset, final int rowLength,
final byte [] familyArray, final int familyOffset, final int familyLength,
final byte [] qualifierArray, final int qualifierOffset, final int qualifierLength) {
// See createCell(final byte [] row, final byte [] value) for why we default Maximum type.
@@ -215,7 +216,7 @@ public final class CellUtil {
* @param row
* @return Cell with passed row but all other fields are arbitrary
*/
- public static Cell createCell(final byte [] row) {
+ public static ServerCell createCell(final byte [] row) {
return createCell(row, HConstants.EMPTY_BYTE_ARRAY);
}
@@ -225,7 +226,7 @@ public final class CellUtil {
* @param value
* @return Cell with passed row and value but all other fields are arbitrary
*/
- public static Cell createCell(final byte [] row, final byte [] value) {
+ public static ServerCell createCell(final byte [] row, final byte [] value) {
// An empty family + empty qualifier + Type.Minimum is used as flag to indicate last on row.
// See the CellComparator and KeyValue comparator. Search for compareWithoutRow.
// Lets not make a last-on-row key as default but at same time, if you are making a key
@@ -241,7 +242,7 @@ public final class CellUtil {
* @param qualifier
* @return Cell with passed row but all other fields are arbitrary
*/
- public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier) {
+ public static ServerCell createCell(final byte [] row, final byte [] family, final byte [] qualifier) {
// See above in createCell(final byte [] row, final byte [] value) why we set type to Maximum.
return createCell(row, family, qualifier,
HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum.getCode(), HConstants.EMPTY_BYTE_ARRAY);
@@ -389,11 +390,16 @@ public final class CellUtil {
length);
}
- public static boolean matchingFamily(final Cell left, final Cell right) {
+ public static boolean matchingFamily(final ServerCell left, final ServerCell right) {
return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(),
right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
}
+ public static boolean matchingFamily(final ServerCell left, final byte[] buf) {
+ return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
+ 0, buf.length);
+ }
+
public static boolean matchingFamily(final Cell left, final byte[] buf) {
return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
0, buf.length);
@@ -405,12 +411,22 @@ public final class CellUtil {
offset, length);
}
+ public static boolean matchingFamily(final ServerCell left, final byte[] buf, final int offset,
+ final int length) {
+ return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
+ offset, length);
+ }
+
public static boolean matchingQualifier(final Cell left, final Cell right) {
return Bytes.equals(left.getQualifierArray(), left.getQualifierOffset(),
left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
right.getQualifierLength());
}
+ public static boolean matchingQualifier(final ServerCell left, final byte[] buf) {
+ return matchingQualifier((Cell)left, buf);
+ }
+
public static boolean matchingQualifier(final Cell left, final byte[] buf) {
if (buf == null) {
return left.getQualifierLength() == 0;
@@ -428,12 +444,19 @@ public final class CellUtil {
left.getQualifierLength(), buf, offset, length);
}
+ // TODO we will have to add ServerCell versions of methods.
public static boolean matchingColumn(final Cell left, final byte[] fam, final byte[] qual) {
if (!matchingFamily(left, fam))
return false;
return matchingQualifier(left, qual);
}
+ public static boolean matchingColumn(final ServerCell left, final byte[] fam, final byte[] qual) {
+ if (!matchingFamily(left, fam))
+ return false;
+ return matchingQualifier(left, qual);
+ }
+
public static boolean matchingColumn(final Cell left, final byte[] fam, final int foffset,
final int flength, final byte[] qual, final int qoffset, final int qlength) {
if (!matchingFamily(left, fam, foffset, flength))
@@ -441,7 +464,14 @@ public final class CellUtil {
return matchingQualifier(left, qual, qoffset, qlength);
}
- public static boolean matchingColumn(final Cell left, final Cell right) {
+ public static boolean matchingColumn(final ServerCell left, final byte[] fam, final int foffset,
+ final int flength, final byte[] qual, final int qoffset, final int qlength) {
+ if (!matchingFamily(left, fam, foffset, flength))
+ return false;
+ return matchingQualifier(left, qual, qoffset, qlength);
+ }
+
+ public static boolean matchingColumn(final ServerCell left, final ServerCell right) {
if (!matchingFamily(left, right))
return false;
return matchingQualifier(left, right);
@@ -639,11 +669,11 @@ public final class CellUtil {
* @throws IOException when the passed cell is not of type {@link SettableSequenceId}
*/
public static void setSequenceId(Cell cell, long seqId) throws IOException {
- if (cell instanceof SettableSequenceId) {
- ((SettableSequenceId) cell).setSequenceId(seqId);
+ if (cell instanceof ServerCell) {
+ ((ServerCell) cell).setSequenceId(seqId);
} else {
throw new IOException(new UnsupportedOperationException("Cell is not of type "
- + SettableSequenceId.class.getName()));
+ + ServerCell.class.getName()));
}
}
@@ -654,11 +684,11 @@ public final class CellUtil {
* @throws IOException when the passed cell is not of type {@link SettableTimestamp}
*/
public static void setTimestamp(Cell cell, long ts) throws IOException {
- if (cell instanceof SettableTimestamp) {
- ((SettableTimestamp) cell).setTimestamp(ts);
+ if (cell instanceof ServerCell) {
+ ((ServerCell) cell).setTimestamp(ts);
} else {
throw new IOException(new UnsupportedOperationException("Cell is not of type "
- + SettableTimestamp.class.getName()));
+ + ServerCell.class.getName()));
}
}
@@ -670,11 +700,11 @@ public final class CellUtil {
* @throws IOException when the passed cell is not of type {@link SettableTimestamp}
*/
public static void setTimestamp(Cell cell, byte[] ts, int tsOffset) throws IOException {
- if (cell instanceof SettableTimestamp) {
- ((SettableTimestamp) cell).setTimestamp(ts, tsOffset);
+ if (cell instanceof ServerCell) {
+ ((ServerCell) cell).setTimestamp(ts, tsOffset);
} else {
throw new IOException(new UnsupportedOperationException("Cell is not of type "
- + SettableTimestamp.class.getName()));
+ + ServerCell.class.getName()));
}
}
@@ -902,4 +932,14 @@ public final class CellUtil {
return builder.toString();
}
+
+ public static void oswrite(final Cell cell, final OutputStream out, final boolean withTags)
+ throws IOException {
+ if (cell instanceof ServerCell) {
+ ((ServerCell) cell).oswrite(out, withTags);
+ } else {
+ // TODO change to IOE?
+ throw new IllegalStateException("Got a cell which can not be written to OutputStream");
+ }
+ }
}
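
CellUtil now routes what used to be separate SettableSequenceId/SettableTimestamp checks, plus the oswrite moved over from KeyValueUtil, through a single instanceof ServerCell test. A usage sketch, relying only on KeyValue implementing ServerCell as declared later in this patch:

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Cell kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    CellUtil.setSequenceId(kv, 42L);  // succeeds: KeyValue is a ServerCell
    CellUtil.oswrite(kv, bos, false); // serialized without tags; a non-ServerCell
                                      // Cell would throw here instead
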
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 19e251a..841a4c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -957,7 +957,7 @@ public final class HConstants {
* The byte array represents for NO_NEXT_INDEXED_KEY;
* The actual value is irrelevant because this is always compared by reference.
*/
- public static final Cell NO_NEXT_INDEXED_KEY = new KeyValue();
+ public static final ServerCell NO_NEXT_INDEXED_KEY = new KeyValue();
/** delimiter used between portions of a region name */
public static final int DELIMITER = ',';
public static final String HBASE_CONFIG_READ_ZOOKEEPER_CONFIG =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 7de1f54..fb80abb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -79,7 +79,7 @@ import com.google.common.annotations.VisibleForTesting;
* and actual tag bytes length.
*/
@InterfaceAudience.Private
-public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, SettableTimestamp {
+public class KeyValue implements ServerCell, HeapSize, Cloneable {
private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<Tag>();
static final Log LOG = LogFactory.getLog(KeyValue.class);
@@ -1058,10 +1058,10 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
*/
@Override
public boolean equals(Object other) {
- if (!(other instanceof Cell)) {
+ if (!(other instanceof ServerCell)) {
return false;
}
- return CellComparator.equals(this, (Cell)other);
+ return CellComparator.equals(this, (ServerCell)other);
}
/**
@@ -1945,10 +1945,12 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return (0xff & type) - (0xff & cell.getTypeByte());
}
+ // TODO how to accept ServerCell in Comparators
public int compareOnlyKeyPortion(Cell left, Cell right) {
return CellComparator.compare(left, right, true);
}
+ // TODO we will have to add ServerCell versions of compare too.
/**
* Compares the Key of a cell -- with fields being more significant in this order:
* rowkey, colfam/qual, timestamp, type, mvcc
@@ -2822,4 +2824,14 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return super.equals(other);
}
}
+
+ @Override
+ public boolean hasArray() {
+ return true;
+ }
+
+ @Override
+ public void oswrite(OutputStream out, boolean withTags) throws IOException {
+ oswrite(this, out, withTags);
+ }
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index dde15bc..6afc31b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -18,15 +18,12 @@
package org.apache.hadoop.hbase;
-import java.io.IOException;
-import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IterableUtils;
@@ -41,6 +38,7 @@ import com.google.common.collect.Lists;
*/
@InterfaceAudience.Private
public class KeyValueUtil {
+ // TODO many of the APIs in this class can be removed and replaced with CellUtil APIs.
/**************** length *********************/
@@ -54,7 +52,7 @@ public class KeyValueUtil {
cell.getValueLength(), cell.getTagsLength(), true);
}
- private static int length(short rlen, byte flen, int qlen, int vlen, int tlen, boolean withTags) {
+ public static int length(short rlen, byte flen, int qlen, int vlen, int tlen, boolean withTags) {
if (withTags) {
return (int) (KeyValue.getKeyValueDataStructureSize(rlen, flen, qlen, vlen, tlen));
}
@@ -71,7 +69,7 @@ public class KeyValueUtil {
return keyLength(cell.getRowLength(), cell.getFamilyLength(), cell.getQualifierLength());
}
- private static int keyLength(short rlen, byte flen, int qlen) {
+ public static int keyLength(short rlen, byte flen, int qlen) {
return (int) KeyValue.getKeyDataStructureSize(rlen, flen, qlen);
}
@@ -541,47 +539,4 @@ public class KeyValueUtil {
});
return new ArrayList<KeyValue>(lazyList);
}
-
- public static void oswrite(final Cell cell, final OutputStream out, final boolean withTags)
- throws IOException {
- if (cell instanceof KeyValue) {
- KeyValue.oswrite((KeyValue) cell, out, withTags);
- } else {
- short rlen = cell.getRowLength();
- byte flen = cell.getFamilyLength();
- int qlen = cell.getQualifierLength();
- int vlen = cell.getValueLength();
- int tlen = cell.getTagsLength();
-
- // write total length
- StreamUtils.writeInt(out, length(rlen, flen, qlen, vlen, tlen, withTags));
- // write key length
- StreamUtils.writeInt(out, keyLength(rlen, flen, qlen));
- // write value length
- StreamUtils.writeInt(out, vlen);
- // Write rowkey - 2 bytes rk length followed by rowkey bytes
- StreamUtils.writeShort(out, rlen);
- out.write(cell.getRowArray(), cell.getRowOffset(), rlen);
- // Write cf - 1 byte of cf length followed by the family bytes
- out.write(flen);
- out.write(cell.getFamilyArray(), cell.getFamilyOffset(), flen);
- // write qualifier
- out.write(cell.getQualifierArray(), cell.getQualifierOffset(), qlen);
- // write timestamp
- StreamUtils.writeLong(out, cell.getTimestamp());
- // write the type
- out.write(cell.getTypeByte());
- // write value
- out.write(cell.getValueArray(), cell.getValueOffset(), vlen);
- // write tags if we have to
- if (withTags) {
- // 2 bytes tags length followed by tags bytes
- // tags length is serialized with 2 bytes only(short way) even if the type is int. As this
- // is non -ve numbers, we save the sign bit. See HBASE-11437
- out.write((byte) (0xff & (tlen >> 8)));
- out.write((byte) (0xff & tlen));
- out.write(cell.getTagsArray(), cell.getTagsOffset(), tlen);
- }
- }
- }
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java
new file mode 100644
index 0000000..0d36378
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java
@@ -0,0 +1,58 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public interface ServerCell extends Cell {
+
+ /**
+ * @return true if the Cell is backed by a byte[]; false when it is backed by a
+ * ByteBuffer.
+ */
+ boolean hasArray();
+
+ // TODO add the buffer backed getter APIs here
+
+ void oswrite(OutputStream out, boolean withTags) throws IOException;
+
+ /**
+ * Sets the given seqId on this cell.
+ * @param seqId the sequence id to set
+ */
+ void setSequenceId(long seqId) throws IOException;
+
+ /**
+ * Sets the given timestamp on this cell.
+ * @param ts the timestamp to set
+ */
+ void setTimestamp(long ts) throws IOException;
+
+ /**
+ * Sets the timestamp from the given buffer.
+ * @param ts buffer containing the timestamp value
+ * @param tsOffset offset to the new timestamp
+ */
+ void setTimestamp(byte[] ts, int tsOffset) throws IOException;
+}
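
A sketch of how server-side code is expected to consume the new interface, gating the byte[] getters on hasArray(); the helper below is hypothetical and only illustrates the contract:

import org.apache.hadoop.hbase.ServerCell;

public class HasArraySketch {
  // Hypothetical helper: copy out a cell's value, honouring the hasArray() contract.
  static byte[] cloneValue(ServerCell cell) {
    if (cell.hasArray()) {
      byte[] value = new byte[cell.getValueLength()];
      System.arraycopy(cell.getValueArray(), cell.getValueOffset(), value, 0,
          cell.getValueLength());
      return value;
    }
    // Buffer backed cell: would go through the getXXXBuffer APIs once they are
    // added to this interface (see the TODO above).
    throw new UnsupportedOperationException("BB backed Cells not handled yet");
  }
}
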
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java
deleted file mode 100644
index 352028a..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-/**
- * Using this Interface one can mark a Cell as Sequence stampable.
- * Note : Make sure to make Cell implementation of this type in server side.
- */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
-public interface SettableSequenceId {
-
- /**
- * Sets with the given seqId.
- * @param seqId
- */
- void setSequenceId(long seqId) throws IOException;
-}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java
deleted file mode 100644
index 6dac5ae..0000000
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-/**
- * Using this Interface one can mark a Cell as timestamp changeable.
- * Note : Server side Cell implementations in write path must implement this.
- */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
-public interface SettableTimestamp {
-
- /**
- * Sets with the given timestamp.
- * @param ts
- */
- void setTimestamp(long ts) throws IOException;
-
- /**
- * Sets with the given timestamp.
- * @param ts buffer containing the timestamp value
- * @param tsOffset offset to the new timestamp
- */
- void setTimestamp(byte[] ts, int tsOffset) throws IOException;
-}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
index 9d03d89..5c503fb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
@@ -25,6 +25,7 @@ import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
@@ -59,6 +60,13 @@ public class CellCodec implements Codec {
this.out.write(Bytes.toBytes(cell.getMvccVersion()));
}
+ @Override
+ public void write(ServerCell cell) throws IOException {
+ // TODO
+ // Same impl as the above method, but check hasArray() and, when it returns false, go
+ // through the getXXXBuffer APIs instead.
+ }
+
/**
* Write int length followed by array bytes.
* @param bytes
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index a614026..5775ab2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -25,6 +25,7 @@ import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
@@ -61,6 +62,13 @@ public class CellCodecWithTags implements Codec {
this.out.write(Bytes.toBytes(cell.getMvccVersion()));
}
+ @Override
+ public void write(ServerCell cell) throws IOException {
+ // TODO
+ // Same impl as the above method, but check hasArray() and, when it returns false, go
+ // through the getXXXBuffer APIs instead.
+ }
+
/**
* Write int length followed by array bytes.
*
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
index de44ec6..4ae9650 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java
@@ -23,7 +23,7 @@ import java.io.OutputStream;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.io.CellOutputStream;
+import org.apache.hadoop.hbase.io.ServerCellOutputStream;
/**
* Encoder/Decoder for Cell.
@@ -40,7 +40,7 @@ public interface Codec {
* Call flush when done. Some encoders may not put anything on the stream until flush is called.
* On flush, let go of any resources used by the encoder.
*/
- interface Encoder extends CellOutputStream {}
+ interface Encoder extends ServerCellOutputStream {}
/**
* Implementations should implicitly clean up any resources allocated when the
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index f41d6b0..0154474 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -22,9 +22,10 @@ import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
@@ -55,7 +56,12 @@ public class KeyValueCodec implements Codec {
public void write(Cell cell) throws IOException {
checkFlushed();
// Do not write tags over RPC
- KeyValueUtil.oswrite(cell, out, false);
+ CellUtil.oswrite(cell, out, false);
+ }
+
+ @Override
+ public void write(ServerCell cell) throws IOException {
+ write((Cell) cell);
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
index 664fcac..2f92f23 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
@@ -22,9 +22,10 @@ import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
@@ -61,7 +62,12 @@ public class KeyValueCodecWithTags implements Codec {
public void write(Cell cell) throws IOException {
checkFlushed();
// Write tags
- KeyValueUtil.oswrite(cell, out, true);
+ CellUtil.oswrite(cell, out, true);
+ }
+
+ @Override
+ public void write(ServerCell cell) throws IOException {
+ write((Cell) cell);
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java
new file mode 100644
index 0000000..0eb70b7
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.ServerCell;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public interface ServerCellOutputStream extends CellOutputStream {
+
+ void write(ServerCell cell) throws IOException;
+}
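
Since Codec.Encoder now extends this interface, every encoder carries both write overloads. A minimal sketch of an encoder built against it, relying only on the CellOutputStream write/flush contract (the class name is illustrative, not part of the patch):

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.io.ServerCellOutputStream;

class SketchEncoder implements ServerCellOutputStream {
  private final OutputStream out;

  SketchEncoder(OutputStream out) {
    this.out = out;
  }

  @Override
  public void write(Cell cell) throws IOException {
    // Falls back to the instanceof dispatch in CellUtil.
    CellUtil.oswrite(cell, this.out, true);
  }

  @Override
  public void write(ServerCell cell) throws IOException {
    // Direct dispatch; no instanceof check needed on this path.
    cell.oswrite(this.out, true);
  }

  @Override
  public void flush() throws IOException {
    this.out.flush();
  }
}
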
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index be8c192..5bcf799 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.Cell;
@@ -30,13 +31,14 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.SettableSequenceId;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -306,7 +308,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
+ getValueLength() + "/seqid=" + memstoreTS;
}
- public Cell shallowCopy() {
+ public ServerCell shallowCopy() {
return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(),
currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength,
currentKey.getQualifierOffset(), currentKey.getQualifierLength(),
@@ -325,7 +327,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
// there. So this has to be an instance of SettableSequenceId. SeekerState need not be
// SettableSequenceId as we never return that to top layers. When we have to, we make
// ClonedSeekerState from it.
- protected static class ClonedSeekerState implements Cell, HeapSize, SettableSequenceId {
+ protected static class ClonedSeekerState implements ServerCell, HeapSize {
private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+ (4 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT)
+ (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (2 * ClassSize.ARRAY));
@@ -514,6 +516,49 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
public long heapSize() {
return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength;
}
+
+ @Override
+ public boolean hasArray() {
+ return true;
+ }
+
+ @Override
+ public void oswrite(OutputStream out, boolean withTags) throws IOException {
+ int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength,
+ tagsLength, withTags);
+ StreamUtils.writeInt(out, lenToWrite);
+ StreamUtils.writeInt(out, keyOnlyBuffer.length);
+ StreamUtils.writeInt(out, valueLength);
+ // Write key
+ out.write(keyOnlyBuffer);
+ // Write value
+ out.write(this.currentBuffer.array(), this.currentBuffer.arrayOffset() + this.valueOffset,
+ this.valueLength);
+ if (withTags) {
+ // 2 bytes tags length followed by tags bytes
+ // tags length is serialized with 2 bytes only (as a short) even though the
+ // type is int. As this is a non-negative number, we save the sign bit.
+ // See HBASE-11437
+ out.write((byte) (0xff & (this.tagsLength >> 8)));
+ out.write((byte) (0xff & this.tagsLength));
+ if (this.tagCompressionContext != null) {
+ out.write(cloneTagsBuffer);
+ } else {
+ out.write(this.currentBuffer.array(), this.currentBuffer.arrayOffset() + this.tagsOffset,
+ this.tagsLength);
+ }
+ }
+ }
+
+ @Override
+ public void setTimestamp(long ts) throws IOException {
+ this.timestamp = ts;
+ }
+
+ @Override
+ public void setTimestamp(byte[] ts, int tsOffset) throws IOException {
+ this.timestamp = Bytes.toLong(ts, tsOffset);
+ }
}
protected abstract static class
@@ -556,7 +601,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
@Override
- public int compareKey(KVComparator comparator, Cell key) {
+ public int compareKey(KVComparator comparator, ServerCell key) {
return comparator.compareOnlyKeyPortion(key,
new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength));
}
@@ -625,7 +670,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
@Override
- public Cell getKeyValue() {
+ public ServerCell getKeyValue() {
return current.shallowCopy();
}
@@ -682,7 +727,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
@Override
- public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) {
+ public int seekToKeyInBlock(ServerCell seekCell, boolean seekBefore) {
int rowCommonPrefix = 0;
int familyCommonPrefix = 0;
int qualCommonPrefix = 0;
@@ -749,7 +794,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
}
} else {
- Cell r = new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength);
+ ServerCell r = new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength);
comp = comparator.compareOnlyKeyPortion(seekCell, r);
}
if (comp == 0) { // exact match
@@ -790,7 +835,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return 1;
}
- private int compareTypeBytes(Cell key, Cell right) {
+ private int compareTypeBytes(ServerCell key, ServerCell right) {
if (key.getFamilyLength() + key.getQualifierLength() == 0
&& key.getTypeByte() == Type.Minimum.getCode()) {
// left is "bigger", i.e. it appears later in the sorted order
@@ -846,7 +891,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
* @return unencoded size added
* @throws IOException
*/
- protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
+ protected final int afterEncodingKeyValue(ServerCell cell, DataOutputStream out,
HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
int size = 0;
if (encodingCtx.getHFileContext().isIncludesTags()) {
@@ -978,7 +1023,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
@Override
- public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException {
BufferedDataBlockEncodingState state = (BufferedDataBlockEncodingState) encodingCtx
.getEncodingState();
@@ -987,7 +1032,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return encodedKvSize;
}
- public abstract int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingCtx,
+ public abstract int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingCtx,
DataOutputStream out) throws IOException;
@Override
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
index 6b87c77..7cf3c00 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
@@ -21,11 +21,11 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.WritableUtils;
public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
@Override
- public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
+ public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext,
DataOutputStream out) throws IOException {
int klength = KeyValueUtil.keyLength(cell);
int vlength = cell.getValueLength();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
index 872c22c..1bf8e4f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
@@ -21,8 +21,8 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -58,7 +58,7 @@ public interface DataBlockEncoder {
* @return unencoded kv size written
* @throws IOException
*/
- int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException;
/**
@@ -161,7 +161,7 @@ public interface DataBlockEncoder {
* @return the KeyValue object at the current position. Includes memstore
* timestamp.
*/
- Cell getKeyValue();
+ ServerCell getKeyValue();
/** Set position to beginning of given block */
void rewind();
@@ -208,7 +208,7 @@ public interface DataBlockEncoder {
* of an exact match. Does not matter in case of an inexact match.
* @return 0 on exact match, 1 on inexact match.
*/
- int seekToKeyInBlock(Cell key, boolean seekBefore);
+ int seekToKeyInBlock(ServerCell key, boolean seekBefore);
/**
* Compare the given key against the current key
@@ -220,6 +220,6 @@ public interface DataBlockEncoder {
*/
public int compareKey(KVComparator comparator, byte[] key, int offset, int length);
- public int compareKey(KVComparator comparator, Cell key);
+ public int compareKey(KVComparator comparator, ServerCell key);
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
index 4182dc4..947013f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
@@ -21,11 +21,11 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -195,7 +195,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
}
@Override
- public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
+ public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext,
DataOutputStream out) throws IOException {
EncodingState state = encodingContext.getEncodingState();
int size = compressSingleKeyValue(out, cell, state.prevCell);
@@ -204,7 +204,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
return size;
}
- private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
+ private int compressSingleKeyValue(DataOutputStream out, ServerCell cell, ServerCell prevCell)
throws IOException {
byte flag = 0;
int kLength = KeyValueUtil.keyLength(cell);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java
index a333a15..93d5d2b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
@@ -30,5 +30,5 @@ public class EncodingState {
/**
* The previous Cell the encoder encoded.
*/
- protected Cell prevCell = null;
+ protected ServerCell prevCell = null;
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
index a6f43d0..7b44452 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
@@ -21,11 +21,11 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -239,7 +239,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
}
@Override
- public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
+ public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext,
DataOutputStream out) throws IOException {
EncodingState state = encodingContext.getEncodingState();
int size = compressSingleKeyValue(out, cell, state.prevCell);
@@ -248,7 +248,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
return size;
}
- private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
+ private int compressSingleKeyValue(DataOutputStream out, ServerCell cell, ServerCell prevCell)
throws IOException {
byte flag = 0;
int kLength = KeyValueUtil.keyLength(cell);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
index 0286eca..39522cc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
@@ -21,11 +21,11 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes;
public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder {
@Override
- public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
+ public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext,
DataOutputStream out) throws IOException {
int klength = KeyValueUtil.keyLength(cell);
int vlength = cell.getValueLength();
@@ -75,7 +75,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder {
return size;
}
- private void writeKeyExcludingCommon(Cell cell, int commonPrefix, DataOutputStream out)
+ private void writeKeyExcludingCommon(ServerCell cell, int commonPrefix, DataOutputStream out)
throws IOException {
short rLen = cell.getRowLength();
if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
index 007f826..e00495c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -82,9 +81,9 @@ public class TestCellComparator {
public void testGetShortMidpoint() {
KeyValue.KVComparator comparator = new KeyValue.KVComparator();
- Cell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
- Cell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
- Cell mid = CellComparator.getMidpoint(comparator, left, right);
+ ServerCell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
+ ServerCell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a"));
+ ServerCell mid = CellComparator.getMidpoint(comparator, left, right);
assertTrue(CellComparator.compare(left, mid, true) <= 0);
assertTrue(CellComparator.compare(mid, right, true) <= 0);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index e0c3bae..3c3e578 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -26,12 +26,12 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Scan;
@@ -134,9 +134,9 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
// filter and having necessary column(s).
scanner = region.getScanner(scan);
while (hasMore) {
- List<List<Cell>> deleteRows = new ArrayList<List<Cell>>(rowBatchSize);
+ List<List<ServerCell>> deleteRows = new ArrayList<List<ServerCell>>(rowBatchSize);
for (int i = 0; i < rowBatchSize; i++) {
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
hasMore = NextState.hasMoreValues(scanner.next(results));
if (results.size() > 0) {
deleteRows.add(results);
@@ -149,7 +149,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
if (deleteRows.size() > 0) {
Mutation[] deleteArr = new Mutation[deleteRows.size()];
int i = 0;
- for (List<Cell> deleteRow : deleteRows) {
+ for (List<ServerCell> deleteRow : deleteRows) {
deleteArr[i++] = createDeleteMutation(deleteRow, deleteType, timestamp);
}
OperationStatus[] opStatus = region.batchMutate(deleteArr, HConstants.NO_NONCE,
@@ -191,7 +191,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
done.run(result);
}
- private Delete createDeleteMutation(List<Cell> deleteRow, DeleteType deleteType,
+ private Delete createDeleteMutation(List<ServerCell> deleteRow, DeleteType deleteType,
Long timestamp) {
long ts;
if (timestamp == null) {
@@ -204,14 +204,14 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
Delete delete = new Delete(row, ts);
if (deleteType == DeleteType.FAMILY) {
Set<byte[]> families = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
- for (Cell kv : deleteRow) {
+ for (ServerCell kv : deleteRow) {
if (families.add(CellUtil.cloneFamily(kv))) {
delete.deleteFamily(CellUtil.cloneFamily(kv), ts);
}
}
} else if (deleteType == DeleteType.COLUMN) {
Set<Column> columns = new HashSet<Column>();
- for (Cell kv : deleteRow) {
+ for (ServerCell kv : deleteRow) {
Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));
if (columns.add(column)) {
// Making deleteColumns() calls more than once for the same cf:qualifier is not correct
@@ -227,13 +227,13 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
// the scan fetched will get deleted.
int noOfVersionsToDelete = 0;
if (timestamp == null) {
- for (Cell kv : deleteRow) {
+ for (ServerCell kv : deleteRow) {
delete.deleteColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
noOfVersionsToDelete++;
}
} else {
Set<Column> columns = new HashSet<Column>();
- for (Cell kv : deleteRow) {
+ for (ServerCell kv : deleteRow) {
Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));
// Only one version of particular column getting deleted.
if (columns.add(column)) {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index 2afd05e..78d92b3 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -22,10 +22,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
@@ -76,13 +76,13 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService
InternalScanner scanner = null;
try {
scanner = env.getRegion().getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMore = false;
byte[] lastRow = null;
long count = 0;
do {
hasMore = NextState.hasMoreValues(scanner.next(results));
- for (Cell kv : results) {
+ for (ServerCell kv : results) {
byte[] currentRow = CellUtil.cloneRow(kv);
if (lastRow == null || !Bytes.equals(lastRow, currentRow)) {
lastRow = currentRow;
@@ -116,12 +116,12 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService
InternalScanner scanner = null;
try {
scanner = env.getRegion().getScanner(new Scan());
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMore = false;
long count = 0;
do {
hasMore = NextState.hasMoreValues(scanner.next(results));
- for (Cell kv : results) {
+ for (ServerCell kv : results) {
count++;
}
results.clear();
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index afcb526..5b482de 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -24,12 +24,12 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.MetaComparator;
import org.apache.hadoop.hbase.KeyValue.RawBytesComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
import org.apache.hadoop.hbase.codec.prefixtree.encode.EncoderFactory;
@@ -162,7 +162,7 @@ public class PrefixTreeCodec implements DataBlockEncoder{
}
@Override
- public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException {
PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
PrefixTreeEncoder builder = state.builder;
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
index b95055c..d001ad7 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
@@ -18,21 +18,23 @@
package org.apache.hadoop.hbase.codec.prefixtree;
+import java.io.IOException;
+import java.io.OutputStream;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.SettableSequenceId;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -96,8 +98,8 @@ public class PrefixTreeSeeker implements EncodedSeeker {
* currently must do deep copy into new array
*/
@Override
- public Cell getKeyValue() {
- Cell cell = ptSearcher.current();
+ public ServerCell getKeyValue() {
+ ServerCell cell = ptSearcher.current();
if (cell == null) {
return null;
}
@@ -119,7 +121,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
* The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to
* use this method instead of the getKeyValue() methods above.
*/
- public Cell get() {
+ public ServerCell get() {
return ptSearcher.current();
}
@@ -187,7 +189,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
* both. Possibly expand the EncodedSeeker to utilize them both.
*/
- protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {
+ protected int seekToOrBeforeUsingPositionAtOrBefore(ServerCell kv, boolean seekBefore) {
// this does a deep copy of the key byte[] because the CellSearcher
// interface wants a Cell
CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);
@@ -211,7 +213,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
return seekToOrBeforeUsingPositionAtOrAfter(kv, seekBefore);
}
- protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {
+ protected int seekToOrBeforeUsingPositionAtOrAfter(ServerCell kv, boolean seekBefore) {
// should probably switch this to use the seekForwardToOrBefore method
CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);
@@ -249,7 +251,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
}
@Override
- public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {
+ public int seekToKeyInBlock(ServerCell key, boolean forceBeforeOnExactMatch) {
if (USE_POSITION_BEFORE) {
return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);
} else {
@@ -258,7 +260,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
}
@Override
- public int compareKey(KVComparator comparator, Cell key) {
+ public int compareKey(KVComparator comparator, ServerCell key) {
ByteBuffer bb = getKeyDeepCopy();
return comparator.compare(key,
new KeyValue.KeyOnlyKeyValue(bb.array(), bb.arrayOffset(), bb.limit()));
@@ -268,7 +270,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
* of the key part is deep copied
*
*/
- private static class ClonedPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
+ private static class ClonedPrefixTreeCell implements ServerCell, HeapSize {
private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+ (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
+ (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));
@@ -451,5 +453,54 @@ public class PrefixTreeSeeker implements EncodedSeeker {
public long heapSize() {
return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
}
+
+ @Override
+ public boolean hasArray() {
+ return true;
+ }
+
+ @Override
+ public void oswrite(OutputStream out, boolean withTags) throws IOException {
+ // write total length
+ StreamUtils.writeInt(out,
+ KeyValueUtil.length(rowLength, famLength, qualLength, valLength, tagsLength, withTags));
+ // write key length
+ StreamUtils.writeInt(out, KeyValueUtil.keyLength(rowLength, famLength, qualLength));
+ // write value length
+ StreamUtils.writeInt(out, valLength);
+ // Write rowkey - 2 bytes rk length followed by rowkey bytes
+ StreamUtils.writeShort(out, rowLength);
+ out.write(row);
+ // Write cf - 1 byte of cf length followed by the family bytes
+ out.write(famLength);
+ out.write(fam);
+ // write qualifier
+ out.write(qual);
+ // write timestamp
+ StreamUtils.writeLong(out, ts);
+ // write the type
+ out.write(type);
+ // write value
+ out.write(val, valOffset, valLength);
+ // write tags if we have to
+ if (withTags) {
+ // 2 bytes tags length followed by tags bytes
+ // tags length is serialized with 2 bytes only (as a short) even though the type is int.
+ // As this is a non-negative number, we save the sign bit. See HBASE-11437
+ out.write((byte) (0xff & (tagsLength >> 8)));
+ out.write((byte) (0xff & tagsLength));
+ out.write(tag);
+ }
+ }
+
+ @Override
+ public void setTimestamp(long ts) throws IOException {
+ this.ts = ts;
+ }
+
+ @Override
+ public void setTimestamp(byte[] ts, int tsOffset) throws IOException {
+ this.ts = Bytes.toLong(ts, tsOffset);
+ }
}
}
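
The two byte tags length written by both oswrite implementations in this patch round trips as below; a standalone sketch of the HBASE-11437 encoding with an illustrative length:

public class TagsLengthSketch {
  public static void main(String[] args) {
    int tlen = 300; // any non-negative value below 1 << 16 fits
    byte hi = (byte) (0xff & (tlen >> 8));
    byte lo = (byte) (0xff & tlen);
    // Decoder side: reassemble the unsigned short from the two bytes.
    int decoded = ((hi & 0xff) << 8) | (lo & 0xff);
    System.out.println(decoded == tlen); // true
  }
}
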
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
index cb7eeea..8734b88 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
@@ -19,9 +19,9 @@
package org.apache.hadoop.hbase.codec.prefixtree.decode;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hadoop.hbase.codec.prefixtree.decode.column.ColumnReader;
import org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader;
@@ -154,11 +154,11 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
/********************** CellScanner **********************/
@Override
- public Cell current() {
+ public ServerCell current() {
if(isOutOfBounds()){
return null;
}
- return (Cell)this;
+ return (ServerCell)this;
}
/******************* Object methods ************************/
@@ -179,7 +179,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
*/
@Override
public String toString() {
- Cell currentCell = current();
+ ServerCell currentCell = current();
if(currentCell==null){
return "null";
}
@@ -418,7 +418,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
/********************* fill in family/qualifier/ts/type/value ************/
- protected int populateNonRowFieldsAndCompareTo(int cellNum, Cell key) {
+ protected int populateNonRowFieldsAndCompareTo(int cellNum, ServerCell key) {
populateNonRowFields(cellNum);
return CellComparator.compare(this, key, true);
}
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
index ec54c2a..942f89b 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
@@ -19,8 +19,8 @@
package org.apache.hadoop.hbase.codec.prefixtree.decode;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
@@ -56,12 +56,12 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
/********************* CellSearcher methods *******************/
@Override
- public boolean positionAt(Cell key) {
+ public boolean positionAt(ServerCell key) {
return CellScannerPosition.AT == positionAtOrAfter(key);
}
@Override
- public CellScannerPosition positionAtOrBefore(Cell key) {
+ public CellScannerPosition positionAtOrBefore(ServerCell key) {
reInitFirstNode();
int fanIndex = -1;
@@ -106,7 +106,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
* if-statements. Priority on readability and debugability.
*/
@Override
- public CellScannerPosition positionAtOrAfter(Cell key) {
+ public CellScannerPosition positionAtOrAfter(ServerCell key) {
reInitFirstNode();
int fanIndex = -1;
@@ -151,7 +151,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
}
@Override
- public boolean seekForwardTo(Cell key) {
+ public boolean seekForwardTo(ServerCell key) {
if(currentPositionIsAfter(key)){
//our position is after the requested key, so can't do anything
return false;
@@ -160,7 +160,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
}
@Override
- public CellScannerPosition seekForwardToOrBefore(Cell key) {
+ public CellScannerPosition seekForwardToOrBefore(ServerCell key) {
//Do we even need this check or should upper layers avoid this situation. It's relatively
//expensive compared to the rest of the seek operation.
if(currentPositionIsAfter(key)){
@@ -172,7 +172,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
}
@Override
- public CellScannerPosition seekForwardToOrAfter(Cell key) {
+ public CellScannerPosition seekForwardToOrAfter(ServerCell key) {
//Do we even need this check or should upper layers avoid this situation. It's relatively
//expensive compared to the rest of the seek operation.
if(currentPositionIsAfter(key)){
@@ -205,11 +205,11 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
/****************** internal methods ************************/
- protected boolean currentPositionIsAfter(Cell cell){
+ protected boolean currentPositionIsAfter(ServerCell cell){
return compareTo(cell) > 0;
}
- protected CellScannerPosition positionAtQualifierTimestamp(Cell key, boolean beforeOnMiss) {
+ protected CellScannerPosition positionAtQualifierTimestamp(ServerCell key, boolean beforeOnMiss) {
int minIndex = 0;
int maxIndex = currentRowNode.getLastCellIndex();
int diff;
@@ -262,7 +262,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
* @param key Cell being searched for
* @return true if row buffer contents match key.row
*/
- protected boolean rowMatchesAfterCurrentPosition(Cell key) {
+ protected boolean rowMatchesAfterCurrentPosition(ServerCell key) {
if (!currentRowNode.hasOccurrences()) {
return false;
}
@@ -279,7 +279,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im
* @param key
* @return return -1 if key is lessThan (before) this, 0 if equal, and 1 if key is after
*/
- protected int compareToCurrentToken(Cell key) {
+ protected int compareToCurrentToken(ServerCell key) {
int startIndex = rowLength - currentRowNode.getTokenLength();
int endIndexExclusive = startIndex + currentRowNode.getTokenLength();
for (int i = startIndex; i < endIndexExclusive; ++i) {
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
index 97eed62..2b37952 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
@@ -18,13 +18,16 @@
package org.apache.hadoop.hbase.codec.prefixtree.decode;
-import org.apache.hadoop.hbase.Cell;
+import java.io.IOException;
+import java.io.OutputStream;
+
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.SettableSequenceId;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
/**
* As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields
@@ -32,7 +35,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
* iterated through.
*/
@InterfaceAudience.Private
-public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell> {
+public class PrefixTreeCell implements ServerCell, Comparable<Cell> {
/********************** static **********************/
@@ -87,11 +90,11 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell>
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java
*/
@InterfaceAudience.Private
-public class PrefixTreeEncoder implements CellOutputStream {
+public class PrefixTreeEncoder implements ServerCellOutputStream {
/**************** static ************************/
@@ -259,7 +261,7 @@ public class PrefixTreeEncoder implements CellOutputStream {
* Add a Cell to the output stream but repeat the previous row.
*/
//@Override
- public void writeWithRepeatRow(Cell cell) {
+ public void writeWithRepeatRow(ServerCell cell) {
ensurePerCellCapacities();//can we optimize away some of this?
//save a relatively expensive row comparison, incrementing the row's counter instead
@@ -269,9 +271,14 @@ public class PrefixTreeEncoder implements CellOutputStream {
addAfterRowFamilyQualifier(cell);
}
-
@Override
public void write(Cell cell) {
+ // When encoding, what we get is a ServerCell, so we should never reach here.
+ // TODO
+ }
+
+ @Override
+ public void write(ServerCell cell) {
ensurePerCellCapacities();
rowTokenizer.addSorted(CellUtil.fillRowRange(cell, rowRange));
@@ -282,14 +289,14 @@ public class PrefixTreeEncoder implements CellOutputStream {
}
- private void addTagPart(Cell cell) {
+ private void addTagPart(ServerCell cell) {
CellUtil.fillTagRange(cell, tagsRange);
tagsDeduplicator.add(tagsRange);
}
/***************** internal add methods ************************/
- private void addAfterRowFamilyQualifier(Cell cell){
+ private void addAfterRowFamilyQualifier(ServerCell cell){
// timestamps
timestamps[totalCells] = cell.getTimestamp();
timestampEncoder.add(cell.getTimestamp());
@@ -327,14 +334,14 @@ public class PrefixTreeEncoder implements CellOutputStream {
++totalCells;
}
- private void addFamilyPart(Cell cell) {
+ private void addFamilyPart(ServerCell cell) {
if (MULITPLE_FAMILIES_POSSIBLE || totalCells == 0) {
CellUtil.fillFamilyRange(cell, familyRange);
familyDeduplicator.add(familyRange);
}
}
- private void addQualifierPart(Cell cell) {
+ private void addQualifierPart(ServerCell cell) {
CellUtil.fillQualifierRange(cell, qualifierRange);
qualifierDeduplicator.add(qualifierRange);
}
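For the stubbed write(Cell) overload above, a minimal defensive sketch of what the TODO might become; the guard and exception message are illustrative, not part of the patch:

  // Hypothetical sketch only: the patch leaves write(Cell) as a TODO stub.
  @Override
  public void write(Cell cell) {
    // The encoding path always hands us ServerCell instances, so a plain Cell is a bug.
    if (cell instanceof ServerCell) {
      write((ServerCell) cell);
      return;
    }
    throw new UnsupportedOperationException("PrefixTreeEncoder expects ServerCell instances");
  }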
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java
index 7e83457..1d976bf 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.codec.prefixtree.scanner;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
/**
* Methods for seeking to a random {@link Cell} inside a sorted collection of cells. Indicates that
@@ -40,7 +40,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* @param key position the CellScanner exactly on this key
* @return true if the cell existed and getCurrentCell() holds a valid cell
*/
- boolean positionAt(Cell key);
+ boolean positionAt(ServerCell key);
/**
* Same as positionAt(..), but go to the extra effort of finding the previous key if there's no
@@ -51,7 +51,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* BEFORE if on last cell before key
* BEFORE_FIRST if key was before the first cell in this scanner's scope
*/
- CellScannerPosition positionAtOrBefore(Cell key);
+ CellScannerPosition positionAtOrBefore(ServerCell key);
/**
* Same as positionAt(..), but go to the extra effort of finding the next key if there's no exact
@@ -62,7 +62,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope
*/
- CellScannerPosition positionAtOrAfter(Cell key);
+ CellScannerPosition positionAtOrAfter(ServerCell key);
/**
* Note: Added for backwards compatibility with
@@ -75,7 +75,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* @param key position the CellScanner exactly on this key
* @return true if getCurrent() holds a valid cell
*/
- boolean seekForwardTo(Cell key);
+ boolean seekForwardTo(ServerCell key);
/**
* Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no
@@ -86,7 +86,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope
*/
- CellScannerPosition seekForwardToOrBefore(Cell key);
+ CellScannerPosition seekForwardToOrBefore(ServerCell key);
/**
* Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no
@@ -97,7 +97,7 @@ public interface CellSearcher extends ReversibleCellScanner {
* AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope
*/
- CellScannerPosition seekForwardToOrAfter(Cell key);
+ CellScannerPosition seekForwardToOrAfter(ServerCell key);
/**
* Note: This may not be appropriate to have in the interface. Need to investigate.
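A short usage sketch of the positioning contract documented above; searcher, cf, cq, ts and v are assumed to be in scope, and the probe row is invented:

  // Illustrative only: try exact positioning first, then settle on the nearest following cell.
  ServerCell probe = new KeyValue(Bytes.toBytes("rowX"), cf, cq, ts, v);
  if (!searcher.positionAt(probe)) {
    CellScannerPosition pos = searcher.positionAtOrAfter(probe);
    if (pos == CellScannerPosition.AFTER_LAST) {
      // probe sorts after every cell in this scanner's scope
    }
  }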
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
index edea305..c140772 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
@@ -24,6 +24,7 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
@@ -91,7 +92,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
private void testBetween1and2(CellSearcher searcher){
CellScannerPosition p;//reuse
- Cell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v);
+ ServerCell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v);
//test exact
Assert.assertFalse(searcher.positionAt(betweenAAndAAA));
@@ -109,7 +110,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
private void testBetween2and3(CellSearcher searcher){
CellScannerPosition p;//reuse
- Cell betweenAAAndB = new KeyValue(AAA, cf, cq, ts-2, v);
+ ServerCell betweenAAAndB = new KeyValue(AAA, cf, cq, ts-2, v);
//test exact
Assert.assertFalse(searcher.positionAt(betweenAAAndB));
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
index 6c3750a..dbc4b89 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
@@ -24,6 +24,7 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
@@ -79,7 +80,7 @@ public class TestRowDataSimple extends BaseTestRowData {
Assert.assertTrue(searcher.positionAt(d.get(3)));
Assert.assertTrue(CellComparator.equals(d.get(3), searcher.current()));
- Cell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0);
+ ServerCell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0);
// test exact
Assert.assertFalse(searcher.positionAt(between4And5));
@@ -95,7 +96,7 @@ public class TestRowDataSimple extends BaseTestRowData {
Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(5)));
// test when key falls before first key in block
- Cell beforeFirst = new KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0);
+ ServerCell beforeFirst = new KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0);
Assert.assertFalse(searcher.positionAt(beforeFirst));
p = searcher.positionAtOrBefore(beforeFirst);
Assert.assertEquals(CellScannerPosition.BEFORE_FIRST, p);
@@ -105,7 +106,7 @@ public class TestRowDataSimple extends BaseTestRowData {
Assert.assertEquals(d.get(0), searcher.current());
// test when key falls after last key in block
- Cell afterLast = new KeyValue(Bytes.toBytes("z"), cf, cq0, ts, v0);// must be lower case z
+ ServerCell afterLast = new KeyValue(Bytes.toBytes("z"), cf, cq0, ts, v0);// must be lower case z
Assert.assertFalse(searcher.positionAt(afterLast));
p = searcher.positionAtOrAfter(afterLast);
Assert.assertEquals(CellScannerPosition.AFTER_LAST, p);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
index 313ecb8..ff9ab14 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
+import java.io.OutputStream;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
@@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
* parts, refer to the original Cell.
*/
@InterfaceAudience.Private
-public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimestamp, HeapSize {
+public class TagRewriteCell implements ServerCell, HeapSize {
private Cell cell;
private byte[] tags;
@@ -39,8 +40,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
* @param tags the tags bytes. The array is supposed to contain the tags bytes alone.
*/
public TagRewriteCell(Cell cell, byte[] tags) {
- assert cell instanceof SettableSequenceId;
- assert cell instanceof SettableTimestamp;
this.cell = cell;
this.tags = tags;
// tag offset will be treated as 0 and length this.tags.length
@@ -199,4 +198,19 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
// The incoming cell is supposed to be SettableSequenceId type.
CellUtil.setSequenceId(cell, seqId);
}
+
+ @Override
+ public boolean hasArray() {
+ return true;
+ }
+
+ @Override
+ public void oswrite(OutputStream out, boolean withTags) throws IOException {
+ // Write all the parts except the tags part
+ CellUtil.oswrite(cell, out, withTags);
+ if (withTags && this.tags != null) {
+ // Write tags if asked for
+ out.write(tags);
+ }
+ }
}
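A small usage sketch of the new oswrite contract; the row, tag bytes, and values are invented for illustration:

  // Illustrative only: serialize a cell whose tags were swapped out; tags are written last.
  byte[] row = Bytes.toBytes("r1"), f = Bytes.toBytes("f"), q = Bytes.toBytes("q");
  byte[] newTags = new byte[0]; // replacement tag bytes, empty here
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  Cell base = new KeyValue(row, f, q, 1L, Bytes.toBytes("v"));
  TagRewriteCell rewritten = new TagRewriteCell(base, newTags);
  rewritten.oswrite(bos, true); // base cell bytes first, then the replacement tags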
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
index a80a07e..899ba83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
@@ -44,7 +44,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner {
private HRegion region;
RegionScanner scanner;
- List<Cell> values;
+ List<ServerCell> values;
public ClientSideRegionScanner(Configuration conf, FileSystem fs,
Path rootDir, HTableDescriptor htd, HRegionInfo hri, Scan scan, ScanMetrics scanMetrics)
@@ -58,7 +58,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner {
// create an internal region scanner
this.scanner = region.getScanner(scan);
- values = new ArrayList<Cell>();
+ values = new ArrayList<ServerCell>();
if (scanMetrics == null) {
initScanMetrics(scan);
@@ -84,7 +84,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner {
Result result = Result.create(values);
if (this.scanMetrics != null) {
long resultSize = 0;
- for (Cell cell : values) {
+ for (ServerCell cell : values) {
resultSize += CellUtil.estimatedSerializedSizeOf(cell);
}
this.scanMetrics.countOfBytesInResults.addAndGet(resultSize);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
index 6c894a5..c4a916a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
/**
@@ -58,6 +59,13 @@ public class MessageCodec implements Codec {
CellProtos.Cell pbcell = builder.build();
pbcell.writeDelimitedTo(this.out);
}
+
+ @Override
+ public void write(ServerCell cell) throws IOException {
+ // TODO
+ // Same impl as the above method, but check hasArray() and, when it is false, use the
+ // getXXXBuffer APIs.
+ }
}
static class MessageDecoder extends BaseDecoder {
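One way the TODO above might handle the hasArray() split; getValueBuffer() is an assumption standing in for the getXXXBuffer APIs the comment alludes to:

  // Hypothetical sketch only: copy the value whether the cell is array- or buffer-backed.
  byte[] value;
  if (cell.hasArray()) {
    value = Bytes.copy(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
  } else {
    value = new byte[cell.getValueLength()];
    ByteBuffer dup = cell.getValueBuffer().duplicate(); // assumed accessor, not in the patch
    dup.get(value);
  }
  // ... then build and write the protobuf Cell from the copied bytes as before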
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index b6f834e..2d493ac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -26,9 +26,9 @@ import java.util.NavigableSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
@@ -82,7 +82,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
byte[] colFamily = scan.getFamilies()[0];
NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
byte[] qualifier = null;
@@ -137,7 +137,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
T temp;
Scan scan = ProtobufUtil.toScan(request.getScan());
scanner = env.getRegion().getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
byte[] colFamily = scan.getFamilies()[0];
NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
byte[] qualifier = null;
@@ -197,7 +197,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
if (qualifiers != null && !qualifiers.isEmpty()) {
qualifier = qualifiers.pollFirst();
}
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMoreRows = false;
do {
hasMoreRows = NextState.hasMoreValues(scanner.next(results));
@@ -237,7 +237,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
RpcCallback done) {
AggregateResponse response = null;
long counter = 0l;
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
InternalScanner scanner = null;
try {
Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -308,7 +308,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
if (qualifiers != null && !qualifiers.isEmpty()) {
qualifier = qualifiers.pollFirst();
}
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMoreRows = false;
do {
@@ -368,7 +368,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
if (qualifiers != null && !qualifiers.isEmpty()) {
qualifier = qualifiers.pollFirst();
}
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMoreRows = false;
@@ -434,7 +434,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
// if weighted median is requested, get qualifier for the weight column
weightQualifier = qualifiers.pollLast();
}
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
boolean hasMoreRows = false;
@@ -444,7 +444,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
hasMoreRows = NextState.hasMoreValues(scanner.next(results));
int listSize = results.size();
for (int i = 0; i < listSize; i++) {
- Cell kv = results.get(i);
+ ServerCell kv = results.get(i);
tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily,
valQualifier, kv)));
if (weightQualifier != null) {
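The scan-and-accumulate pattern these hunks keep retyping to ServerCell, reduced to one hedged sketch; scanner, ci, colFamily, and qualifier stand in for the surrounding fields:

  // Illustrative only: drain the region scanner, interpreting each cell as it arrives.
  List<ServerCell> results = new ArrayList<ServerCell>();
  boolean hasMoreRows;
  do {
    hasMoreRows = NextState.hasMoreValues(scanner.next(results));
    for (ServerCell kv : results) {
      T temp = ci.getValue(colFamily, qualifier, kv); // read straight off the server cell
      // fold temp into the running aggregate (max/min/sum/...) here
    }
    results.clear();
  } while (hasMoreRows);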
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
index 9e0cb9b..0850afa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -232,12 +233,12 @@ public abstract class BaseRegionObserver implements RegionObserver {
@Override
public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
- final Get get, final List<Cell> results) throws IOException {
+ final Get get, final List<ServerCell> results) throws IOException {
}
@Override
public void postGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
- final Get get, final List<Cell> results) throws IOException {
+ final Get get, final List<ServerCell> results) throws IOException {
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index 7ee5a99..51153a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -481,7 +482,7 @@ public interface RegionObserver extends Coprocessor {
* @throws IOException if an error occurred on the coprocessor
*/
void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get,
- final List<Cell> result)
+ final List<ServerCell> result)
throws IOException;
/**
@@ -495,7 +496,7 @@ public interface RegionObserver extends Coprocessor {
* @throws IOException if an error occurred on the coprocessor
*/
void postGetOp(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get,
- final List<Cell> result)
+ final List<ServerCell> result)
throws IOException;
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index 43bbab5..4384636 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -27,9 +27,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -57,7 +57,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
// i.e. empty column and a timestamp of LATEST_TIMESTAMP.
protected final byte [] splitkey;
- protected final Cell splitCell;
+ protected final ServerCell splitCell;
private byte[] firstKey = null;
@@ -148,7 +148,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
return delegate.getValueString();
}
- public Cell getKeyValue() {
+ public ServerCell getKeyValue() {
if (atEnd) return null;
return delegate.getKeyValue();
@@ -244,7 +244,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
}
@Override
- public int seekTo(Cell key) throws IOException {
+ public int seekTo(ServerCell key) throws IOException {
if (top) {
if (getComparator().compareOnlyKeyPortion(key, splitCell) < 0) {
return -1;
@@ -266,7 +266,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
}
@Override
- public int reseekTo(Cell key) throws IOException {
+ public int reseekTo(ServerCell key) throws IOException {
// This function is identical to the corresponding seekTo function
// except
// that we call reseekTo (and not seekTo) on the delegate.
@@ -294,9 +294,9 @@ public class HalfStoreFileReader extends StoreFile.Reader {
}
@Override
- public boolean seekBefore(Cell key) throws IOException {
+ public boolean seekBefore(ServerCell key) throws IOException {
if (top) {
- Cell fk = new KeyValue.KeyOnlyKeyValue(getFirstKey(), 0, getFirstKey().length);
+ ServerCell fk = new KeyValue.KeyOnlyKeyValue(getFirstKey(), 0, getFirstKey().length);
if (getComparator().compareOnlyKeyPortion(key, fk) <= 0) {
return false;
}
@@ -319,7 +319,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
}
@Override
- public Cell getNextIndexedKey() {
+ public ServerCell getNextIndexedKey() {
return null;
}
};
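The guard the retyped seekTo keeps enforcing, abridged into a sketch; the bottom-half clamping in the real method is omitted:

  // Illustrative only: a top-half reader rejects keys below the split point up front.
  public int seekTo(ServerCell key) throws IOException {
    if (top && getComparator().compareOnlyKeyPortion(key, splitCell) < 0) {
      return -1; // key lives in the bottom half; nothing to seek to here
    }
    return delegate.seekTo(key);
  }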
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
index 52491e6..9c6eb77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
@@ -29,11 +29,11 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -49,7 +49,7 @@ import org.apache.hadoop.io.Writable;
public abstract class AbstractHFileWriter implements HFile.Writer {
/** The Cell previously appended. Becomes the last cell in the file.*/
- protected Cell lastCell = null;
+ protected ServerCell lastCell = null;
/** FileSystem stream to write into. */
protected FSDataOutputStream outputStream;
@@ -85,7 +85,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
* First cell in a block.
* This reference should be short-lived since we write hfiles in a burst.
*/
- protected Cell firstCellInBlock = null;
+ protected ServerCell firstCellInBlock = null;
/** May be null if we were passed a stream. */
protected final Path path;
@@ -191,7 +191,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
* @return true if the key is duplicate
* @throws IOException if the key or the key order is wrong
*/
- protected boolean checkKey(final Cell cell) throws IOException {
+ protected boolean checkKey(final ServerCell cell) throws IOException {
boolean isDuplicateKey = false;
if (cell == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
index 4a5bb64..1b20182 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
/**
* BlockWithScanInfo is wrapper class for HFileBlock with other attributes. These attributes are
@@ -29,9 +29,9 @@ public class BlockWithScanInfo {
* The first key in the next block following this one in the HFile.
* If this key is unknown, this is reference-equal with HConstants.NO_NEXT_INDEXED_KEY
*/
- private final Cell nextIndexedKey;
+ private final ServerCell nextIndexedKey;
- public BlockWithScanInfo(HFileBlock hFileBlock, Cell nextIndexedKey) {
+ public BlockWithScanInfo(HFileBlock hFileBlock, ServerCell nextIndexedKey) {
this.hFileBlock = hFileBlock;
this.nextIndexedKey = nextIndexedKey;
}
@@ -40,7 +40,7 @@ public class BlockWithScanInfo {
return hFileBlock;
}
- public Cell getNextIndexedKey() {
+ public ServerCell getNextIndexedKey() {
return nextIndexedKey;
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index 610fe7f..2629774 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -47,10 +47,10 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -201,7 +201,7 @@ public class HFile {
/** Add an element to the file info map. */
void appendFileInfo(byte[] key, byte[] value) throws IOException;
- void append(Cell cell) throws IOException;
+ void append(ServerCell cell) throws IOException;
/** @return the path to this {@link HFile} */
Path getPath();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 4115941..3ee28a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -30,8 +30,8 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteBufferInputStream;
@@ -861,7 +861,7 @@ public class HFileBlock implements Cacheable {
* @param cell
* @throws IOException
*/
- public void write(Cell cell) throws IOException{
+ public void write(ServerCell cell) throws IOException{
expectState(State.WRITING);
this.unencodedDataSizeWritten += this.dataBlockEncoder.encode(cell, dataBlockEncodingCtx,
this.userDataStream);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 77266df..bcbb471 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -35,11 +35,11 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader;
@@ -176,7 +176,7 @@ public class HFileBlockIndex {
* @return reader a basic way to load blocks
* @throws IOException
*/
- public HFileBlock seekToDataBlock(final Cell key, HFileBlock currentBlock, boolean cacheBlocks,
+ public HFileBlock seekToDataBlock(final ServerCell key, HFileBlock currentBlock, boolean cacheBlocks,
boolean pread, boolean isCompaction, DataBlockEncoding expectedDataBlockEncoding)
throws IOException {
BlockWithScanInfo blockWithScanInfo = loadDataBlockWithScanInfo(key, currentBlock,
@@ -208,7 +208,7 @@ public class HFileBlockIndex {
* scan info such as nextIndexedKey.
* @throws IOException
*/
- public BlockWithScanInfo loadDataBlockWithScanInfo(Cell key, HFileBlock currentBlock,
+ public BlockWithScanInfo loadDataBlockWithScanInfo(ServerCell key, HFileBlock currentBlock,
boolean cacheBlocks,
boolean pread, boolean isCompaction, DataBlockEncoding expectedDataBlockEncoding)
throws IOException {
@@ -218,7 +218,7 @@ public class HFileBlockIndex {
}
// the next indexed key
- Cell nextIndexedKey = null;
+ ServerCell nextIndexedKey = null;
// Read the next-level (intermediate or leaf) index block.
long currentOffset = blockOffsets[rootLevelIndex];
@@ -422,7 +422,7 @@ public class HFileBlockIndex {
* @param key
* Key to find
*/
- public int rootBlockContainingKey(final Cell key) {
+ public int rootBlockContainingKey(final ServerCell key) {
int pos = Bytes.binarySearch(blockKeys, key, comparator);
// pos is between -(blockKeys.length + 1) to blockKeys.length - 1, see
// binarySearch's javadoc.
@@ -506,7 +506,7 @@ public class HFileBlockIndex {
* -1 otherwise
* @throws IOException
*/
- static int binarySearchNonRootIndex(Cell key, ByteBuffer nonRootIndex,
+ static int binarySearchNonRootIndex(ServerCell key, ByteBuffer nonRootIndex,
KVComparator comparator) {
int numEntries = nonRootIndex.getInt(0);
@@ -595,7 +595,7 @@ public class HFileBlockIndex {
* return -1 in the case the given key is before the first key.
*
*/
- static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, Cell key,
+ static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, ServerCell key,
KVComparator comparator) {
int entryIndex = binarySearchNonRootIndex(key, nonRootBlock, comparator);
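How callers decode the search-result convention noted in the javadoc above, as a sketch reusing the names from the hunk:

  // Illustrative only: a miss comes back as -(insertionPoint + 1), which still
  // identifies the root-level block that covers the key.
  int pos = Bytes.binarySearch(blockKeys, key, comparator);
  if (pos < 0) {
    int insertionPoint = -(pos + 1);
    pos = insertionPoint - 1; // -1 here means the key precedes the first block
  }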
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
index 7ba74a2..f6ba4a5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile;
import java.io.DataOutputStream;
import java.io.IOException;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
@@ -55,7 +55,7 @@ public interface HFileDataBlockEncoder {
* @return unencoded kv size
* @throws IOException
*/
- int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException;
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
index 29bc292..80513d7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile;
import java.io.DataOutputStream;
import java.io.IOException;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
@@ -92,7 +92,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder {
}
@Override
- public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException {
return this.encoding.getEncoder().encode(cell, encodingCtx, out);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index 7b92df9..a3d751e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -45,11 +45,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -313,9 +313,9 @@ public class HFilePrettyPrinter extends Configured implements Tool {
private void scanKeysValues(Path file, KeyValueStatsCollector fileStats,
HFileScanner scanner, byte[] row) throws IOException {
- Cell pCell = null;
+ ServerCell pCell = null;
do {
- Cell cell = scanner.getKeyValue();
+ ServerCell cell = scanner.getKeyValue();
if (row != null && row.length != 0) {
int result = CellComparator.compareRows(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength(), row, 0, row.length);
@@ -461,11 +461,11 @@ public class HFilePrettyPrinter extends Configured implements Tool {
byte[] biggestRow = null;
- private Cell prevCell = null;
+ private ServerCell prevCell = null;
private long maxRowBytes = 0;
private long curRowKeyLength;
- public void collect(Cell cell) {
+ public void collect(ServerCell cell) {
valLen.update(cell.getValueLength());
if (prevCell != null &&
KeyValue.COMPARATOR.compareRows(prevCell, cell) != 0) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
index c0e3e91..f4cc726 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
@@ -28,11 +28,11 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
@@ -543,7 +543,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
protected HFileBlock block;
@Override
- public Cell getNextIndexedKey() {
+ public ServerCell getNextIndexedKey() {
return nextIndexedKey;
}
/**
@@ -553,7 +553,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
*
* If the nextIndexedKey is null, it means the nextIndexedKey has not been loaded yet.
*/
- protected Cell nextIndexedKey;
+ protected ServerCell nextIndexedKey;
public AbstractScannerV2(HFileReaderV2 r, boolean cacheBlocks,
final boolean pread, final boolean isCompaction) {
@@ -562,8 +562,8 @@ public class HFileReaderV2 extends AbstractHFileReader {
protected abstract ByteBuffer getFirstKeyInBlock(HFileBlock curBlock);
- protected abstract int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey,
- boolean rewind, Cell key, boolean seekBefore) throws IOException;
+ protected abstract int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey,
+ boolean rewind, ServerCell key, boolean seekBefore) throws IOException;
@Override
public int seekTo(byte[] key, int offset, int length) throws IOException {
@@ -578,12 +578,12 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public int seekTo(Cell key) throws IOException {
+ public int seekTo(ServerCell key) throws IOException {
return seekTo(key, true);
}
@Override
- public int reseekTo(Cell key) throws IOException {
+ public int reseekTo(ServerCell key) throws IOException {
int compared;
if (isSeeked()) {
compared = compareKey(reader.getComparator(), key);
@@ -627,7 +627,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
* using a faked index key
* @throws IOException
*/
- public int seekTo(Cell key, boolean rewind) throws IOException {
+ public int seekTo(ServerCell key, boolean rewind) throws IOException {
HFileBlockIndex.BlockIndexReader indexReader = reader.getDataBlockIndexReader();
BlockWithScanInfo blockWithScanInfo = indexReader.loadDataBlockWithScanInfo(key, block,
cacheBlocks, pread, isCompaction, getEffectiveDataBlockEncoding());
@@ -645,7 +645,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public boolean seekBefore(Cell key) throws IOException {
+ public boolean seekBefore(ServerCell key) throws IOException {
HFileBlock seekToBlock = reader.getDataBlockIndexReader().seekToDataBlock(key, block,
cacheBlocks, pread, isCompaction,
((HFileReaderV2) reader).getEffectiveEncodingInCache(isCompaction));
@@ -674,7 +674,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
// TODO shortcut: seek forward in this block to the last key of the
// block.
}
- Cell firstKeyInCurrentBlock = new KeyValue.KeyOnlyKeyValue(Bytes.getBytes(firstKey));
+ ServerCell firstKeyInCurrentBlock = new KeyValue.KeyOnlyKeyValue(Bytes.getBytes(firstKey));
loadBlockAndSeekToKey(seekToBlock, firstKeyInCurrentBlock, true, key, true);
return true;
}
@@ -726,7 +726,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
public abstract int compareKey(KVComparator comparator, byte[] key, int offset,
int length);
- public abstract int compareKey(KVComparator comparator, Cell kv);
+ public abstract int compareKey(KVComparator comparator, ServerCell kv);
}
/**
@@ -742,7 +742,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public Cell getKeyValue() {
+ public ServerCell getKeyValue() {
if (!isSeeked())
return null;
@@ -879,8 +879,8 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey,
- boolean rewind, Cell key, boolean seekBefore) throws IOException {
+ protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey,
+ boolean rewind, ServerCell key, boolean seekBefore) throws IOException {
if (block == null || block.getOffset() != seekToBlock.getOffset()) {
updateCurrBlock(seekToBlock);
} else if (rewind) {
@@ -969,7 +969,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
* less than the first key of current block (e.g. using a faked index
* key)
*/
- protected int blockSeek(Cell key, boolean seekBefore) {
+ protected int blockSeek(ServerCell key, boolean seekBefore) {
int klen, vlen;
long memstoreTS = 0;
int memstoreTSLen = 0;
@@ -1072,7 +1072,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public int compareKey(KVComparator comparator, Cell key) {
+ public int compareKey(KVComparator comparator, ServerCell key) {
return comparator.compareOnlyKeyPortion(
key,
new KeyValue.KeyOnlyKeyValue(blockBuffer.array(), blockBuffer.arrayOffset()
@@ -1203,7 +1203,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public Cell getKeyValue() {
+ public ServerCell getKeyValue() {
if (block == null) {
return null;
}
@@ -1236,8 +1236,8 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey,
- boolean rewind, Cell key, boolean seekBefore) throws IOException {
+ protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey,
+ boolean rewind, ServerCell key, boolean seekBefore) throws IOException {
if (block == null || block.getOffset() != seekToBlock.getOffset()) {
updateCurrentBlock(seekToBlock);
} else if (rewind) {
@@ -1248,7 +1248,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
}
@Override
- public int compareKey(KVComparator comparator, Cell key) {
+ public int compareKey(KVComparator comparator, ServerCell key) {
return seeker.compareKey(comparator, key);
}
}
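The reseek shortcut these scanners rely on, condensed into a hedged sketch; the real reseekTo is more involved:

  // Illustrative only: when the sought key is ahead of the current position but still
  // before the next indexed key, walk forward in the current block and skip the index.
  if (compareKey(reader.getComparator(), key) > 0
      && (nextIndexedKey == HConstants.NO_NEXT_INDEXED_KEY
          || reader.getComparator().compareOnlyKeyPortion(key, nextIndexedKey) < 0)) {
    return loadBlockAndSeekToKey(block, nextIndexedKey, false, key, false);
  }
  return seekTo(key, false); // otherwise pay for a fresh index lookup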
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
index b28d8c1..e6d7c35 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
@@ -26,10 +26,10 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.crypto.Cipher;
@@ -245,7 +245,7 @@ public class HFileReaderV3 extends HFileReaderV2 {
* key)
*/
@Override
- protected int blockSeek(Cell key, boolean seekBefore) {
+ protected int blockSeek(ServerCell key, boolean seekBefore) {
int klen, vlen, tlen = 0;
long memstoreTS = 0;
int memstoreTSLen = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
index deaa2c0..2aecfa0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
@@ -22,7 +22,7 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
/**
* A scanner allows you to position yourself within a HFile and
@@ -59,7 +59,7 @@ public interface HFileScanner {
@Deprecated
int seekTo(byte[] key, int offset, int length) throws IOException;
- int seekTo(Cell c) throws IOException;
+ int seekTo(ServerCell c) throws IOException;
/**
* Reseek to or just before the passed key. Similar to seekTo
* except that this can be called even if the scanner is not at the beginning
@@ -85,7 +85,7 @@ public interface HFileScanner {
@Deprecated
int reseekTo(byte[] key, int offset, int length) throws IOException;
- int reseekTo(Cell c) throws IOException;
+ int reseekTo(ServerCell c) throws IOException;
/**
* Consider the key stream of all the keys in the file,
* k[0] .. k[n], where there are n keys in the file.
@@ -101,7 +101,7 @@ public interface HFileScanner {
@Deprecated
boolean seekBefore(byte[] key, int offset, int length) throws IOException;
- boolean seekBefore(Cell kv) throws IOException;
+ boolean seekBefore(ServerCell kv) throws IOException;
/**
* Positions this scanner at the start of the file.
* @return False if empty file; i.e. a call to next would return false and
@@ -133,7 +133,7 @@ public interface HFileScanner {
/**
* @return Instance of {@link org.apache.hadoop.hbase.KeyValue}.
*/
- Cell getKeyValue();
+ ServerCell getKeyValue();
/**
* Convenience method to get a copy of the key as a string - interpreting the
* bytes as UTF8. You must call {@link #seekTo(byte[])} before this method.
@@ -160,5 +160,5 @@ public interface HFileScanner {
/**
* @return the next key in the index (the key to seek to the next block)
*/
- Cell getNextIndexedKey();
+ ServerCell getNextIndexedKey();
}
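A hedged usage sketch of the retyped seek API; scanner construction is elided and the probe cell is invented:

  // Illustrative only: seekTo returns -1 (key before first), 0 (exact hit) or 1 (in between).
  ServerCell probe = new KeyValue(Bytes.toBytes("row42"), Bytes.toBytes("f"), Bytes.toBytes("q"));
  int where = scanner.seekTo(probe);
  if (where == -1) {
    scanner.seekTo(); // fall back to the start of the file
  }
  do {
    ServerCell c = scanner.getKeyValue();
    // consume c
  } while (scanner.next());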
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
index 28c4655..32b3afb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
@@ -31,10 +31,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
@@ -80,7 +80,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
* The last(stop) Cell of the previous data block.
* This reference should be short-lived since we write hfiles in a burst.
*/
- private Cell lastCellOfPreviousBlock = null;
+ private ServerCell lastCellOfPreviousBlock = null;
/** Additional data items to be written to the "load-on-open" section. */
private List<BlockWritable> additionalLoadOnOpenData =
@@ -162,7 +162,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
fsBlockWriter.writeHeaderAndData(outputStream);
int onDiskSize = fsBlockWriter.getOnDiskSizeWithHeader();
- Cell indexEntry =
+ ServerCell indexEntry =
CellComparator.getMidpoint(this.comparator, lastCellOfPreviousBlock, firstCellInBlock);
dataBlockIndexWriter.addEntry(CellUtil.getCellKeySerializedAsKeyValueKey(indexEntry),
lastDataBlockOffset, onDiskSize);
@@ -252,7 +252,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
* @throws IOException
*/
@Override
- public void append(final Cell cell) throws IOException {
+ public void append(final ServerCell cell) throws IOException {
byte[] value = cell.getValueArray();
int voffset = cell.getValueOffset();
int vlength = cell.getValueLength();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
index 086395c..6fb167c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
@@ -26,9 +26,9 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@@ -82,7 +82,7 @@ public class HFileWriterV3 extends HFileWriterV2 {
* @throws IOException
*/
@Override
- public void append(final Cell cell) throws IOException {
+ public void append(final ServerCell cell) throws IOException {
// Currently get the complete arrays
super.append(cell);
int tagsLength = cell.getTagsLength();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
index f75f6e9..ed51056 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
@@ -20,10 +20,10 @@ import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
@@ -45,7 +45,7 @@ public class NoOpDataBlockEncoder implements HFileDataBlockEncoder {
}
@Override
- public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
+ public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
throws IOException {
int klength = KeyValueUtil.keyLength(cell);
int vlength = cell.getValueLength();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index 9c5b5af..11880bd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -110,7 +111,7 @@ public class Import extends Configured implements Tool {
}
if (filter == null || !filter.filterRowKey(row.get(), row.getOffset(), row.getLength())) {
for (Cell kv : value.rawCells()) {
- kv = filterKv(filter, kv);
+ kv = filterKv(filter, (ServerCell) kv);
// skip if we filtered it out
if (kv == null) continue;
// TODO get rid of ensureKeyValue
@@ -171,7 +172,7 @@ public class Import extends Configured implements Tool {
protected void processKV(ImmutableBytesWritable key, Result result, Context context, Put put,
Delete delete) throws IOException, InterruptedException {
for (Cell kv : result.rawCells()) {
- kv = filterKv(filter, kv);
+ kv = filterKv(filter, (ServerCell) kv);
// skip if we filter it out
if (kv == null) continue;
@@ -312,7 +313,7 @@ public class Import extends Configured implements Tool {
* @return null if the key should not be written, otherwise returns the original
* {@link KeyValue}
*/
- public static Cell filterKv(Filter filter, Cell kv) throws IOException {
+ public static Cell filterKv(Filter filter, ServerCell kv) throws IOException {
// apply the filter and skip this kv if the filter doesn't apply
if (filter != null) {
Filter.ReturnCode code = filter.filterKeyValue(kv);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java
index 4c3ab50..774a019 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java
@@ -26,8 +26,8 @@ import java.util.SortedSet;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
@@ -45,96 +45,96 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
* get and set and won't throw ConcurrentModificationException when iterating.
*/
@InterfaceAudience.Private
-public class CellSkipListSet implements NavigableSet<Cell> {
- private final ConcurrentNavigableMap<Cell, Cell> delegatee;
+public class CellSkipListSet implements NavigableSet<ServerCell> {
+ private final ConcurrentNavigableMap<ServerCell, ServerCell> delegatee;
CellSkipListSet(final KeyValue.KVComparator c) {
- this.delegatee = new ConcurrentSkipListMap<Cell, Cell>(c);
+ this.delegatee = new ConcurrentSkipListMap<ServerCell, ServerCell>(c);
}
- CellSkipListSet(final ConcurrentNavigableMap<Cell, Cell> m) {
+ CellSkipListSet(final ConcurrentNavigableMap<ServerCell, ServerCell> m) {
this.delegatee = m;
}
- public Cell ceiling(Cell e) {
+ public ServerCell ceiling(ServerCell e) {
throw new UnsupportedOperationException("Not implemented");
}
- public Iterator<Cell> descendingIterator() {
+ public Iterator<ServerCell> descendingIterator() {
return this.delegatee.descendingMap().values().iterator();
}
- public NavigableSet<Cell> descendingSet() {
+ public NavigableSet<ServerCell> descendingSet() {
throw new UnsupportedOperationException("Not implemented");
}
- public Cell floor(Cell e) {
+ public ServerCell floor(ServerCell e) {
throw new UnsupportedOperationException("Not implemented");
}
- public SortedSet<Cell> headSet(final Cell toElement) {
+ public SortedSet<ServerCell> headSet(final ServerCell toElement) {
return headSet(toElement, false);
}
- public NavigableSet<Cell> headSet(final Cell toElement,
+ public NavigableSet<ServerCell> headSet(final ServerCell toElement,
boolean inclusive) {
return new CellSkipListSet(this.delegatee.headMap(toElement, inclusive));
}
- public Cell higher(Cell e) {
+ public ServerCell higher(ServerCell e) {
throw new UnsupportedOperationException("Not implemented");
}
- public Iterator<Cell> iterator() {
+ public Iterator<ServerCell> iterator() {
return this.delegatee.values().iterator();
}
- public Cell lower(Cell e) {
+ public ServerCell lower(ServerCell e) {
throw new UnsupportedOperationException("Not implemented");
}
- public Cell pollFirst() {
+ public ServerCell pollFirst() {
throw new UnsupportedOperationException("Not implemented");
}
- public Cell pollLast() {
+ public ServerCell pollLast() {
throw new UnsupportedOperationException("Not implemented");
}
- public SortedSet<Cell> subSet(Cell fromElement, Cell toElement) {
+ public SortedSet<ServerCell> subSet(ServerCell fromElement, ServerCell toElement) {
throw new UnsupportedOperationException("Not implemented");
}
- public NavigableSet<Cell> subSet(Cell fromElement,
- boolean fromInclusive, Cell toElement, boolean toInclusive) {
+ public NavigableSet<ServerCell> subSet(ServerCell fromElement,
+ boolean fromInclusive, ServerCell toElement, boolean toInclusive) {
throw new UnsupportedOperationException("Not implemented");
}
- public SortedSet<Cell> tailSet(Cell fromElement) {
+ public SortedSet<ServerCell> tailSet(ServerCell fromElement) {
return tailSet(fromElement, true);
}
- public NavigableSet<Cell> tailSet(Cell fromElement, boolean inclusive) {
+ public NavigableSet<ServerCell> tailSet(ServerCell fromElement, boolean inclusive) {
return new CellSkipListSet(this.delegatee.tailMap(fromElement, inclusive));
}
- public Comparator<? super Cell> comparator() {
+ public Comparator<? super ServerCell> comparator() {
throw new UnsupportedOperationException("Not implemented");
}
- public Cell first() {
+ public ServerCell first() {
return this.delegatee.get(this.delegatee.firstKey());
}
- public Cell last() {
+ public ServerCell last() {
return this.delegatee.get(this.delegatee.lastKey());
}
- public boolean add(Cell e) {
+ public boolean add(ServerCell e) {
return this.delegatee.put(e, e) == null;
}
- public boolean addAll(Collection<? extends Cell> c) {
+ public boolean addAll(Collection<? extends ServerCell> c) {
throw new UnsupportedOperationException("Not implemented");
}
@@ -167,7 +167,7 @@ public class CellSkipListSet implements NavigableSet<Cell> {
throw new UnsupportedOperationException("Not implemented");
}
- public Cell get(Cell kv) {
+ public ServerCell get(ServerCell kv) {
return this.delegatee.get(kv);
}
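Why the set is map-backed becomes clearer in a short sketch; the cells are invented, and the constructor is package-private, so this assumes same-package access:

  // Illustrative only: add() stores the cell as key and value, so a later get() with an
  // equal-sorting probe hands back the instance the set actually holds.
  CellSkipListSet set = new CellSkipListSet(KeyValue.COMPARATOR);
  byte[] row = Bytes.toBytes("r"), f = Bytes.toBytes("f"), q = Bytes.toBytes("q");
  set.add(new KeyValue(row, f, q, 1L, Bytes.toBytes("v1")));
  ServerCell probe = new KeyValue(row, f, q, 1L, Bytes.toBytes("v2"));
  set.add(probe); // same key per comparator: the mapping is replaced (an upsert)
  ServerCell stored = set.get(probe); // returns the cell now held by the set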
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
index 3da0c0b..32e6a0b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
@@ -33,12 +33,12 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.Bytes;
@@ -131,10 +131,10 @@ public class DefaultMemStore implements MemStore {
}
void dump() {
- for (Cell cell: this.cellSet) {
+ for (ServerCell cell: this.cellSet) {
LOG.info(cell);
}
- for (Cell cell: this.snapshot) {
+ for (ServerCell cell: this.snapshot) {
LOG.info(cell);
}
}
@@ -221,9 +221,9 @@ public class DefaultMemStore implements MemStore {
* passed-in KV
*/
@Override
- public Pair<Long, Cell> add(Cell cell) {
- Cell toAdd = maybeCloneWithAllocator(cell);
- return new Pair<Long, Cell>(internalAdd(toAdd), toAdd);
+ public Pair<Long, ServerCell> add(ServerCell cell) {
+ ServerCell toAdd = maybeCloneWithAllocator(cell);
+ return new Pair<Long, ServerCell>(internalAdd(toAdd), toAdd);
}
@Override
@@ -231,13 +231,13 @@ public class DefaultMemStore implements MemStore {
return timeOfOldestEdit;
}
- private boolean addToCellSet(Cell e) {
+ private boolean addToCellSet(ServerCell e) {
boolean b = this.cellSet.add(e);
setOldestEditTimeToNow();
return b;
}
- private boolean removeFromCellSet(Cell e) {
+ private boolean removeFromCellSet(ServerCell e) {
boolean b = this.cellSet.remove(e);
setOldestEditTimeToNow();
return b;
@@ -255,14 +255,14 @@ public class DefaultMemStore implements MemStore {
*
* Callers should ensure they already have the read lock taken
*/
- private long internalAdd(final Cell toAdd) {
+ private long internalAdd(final ServerCell toAdd) {
long s = heapSizeChange(toAdd, addToCellSet(toAdd));
timeRangeTracker.includeTimestamp(toAdd);
this.size.addAndGet(s);
return s;
}
- private Cell maybeCloneWithAllocator(Cell cell) {
+ private ServerCell maybeCloneWithAllocator(ServerCell cell) {
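+ // When an MSLAB allocator is configured, the cell's bytes are copied into
+ // slab-allocated chunks to limit old-gen heap fragmentation; with no
+ // allocator the original cell is used as-is.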
if (allocator == null) {
return cell;
}
@@ -290,13 +290,13 @@ public class DefaultMemStore implements MemStore {
* @param cell
*/
@Override
- public void rollback(Cell cell) {
+ public void rollback(ServerCell cell) {
// If the key is in the snapshot, delete it. We should not update
// this.size, because that tracks the size of only the memstore and
// not the snapshot. The flush of this snapshot to disk has not
// yet started because Store.flush() waits for all rwcc transactions to
// commit before starting the flush to disk.
- Cell found = this.snapshot.get(cell);
+ ServerCell found = this.snapshot.get(cell);
if (found != null && found.getSequenceId() == cell.getSequenceId()) {
this.snapshot.remove(cell);
long sz = heapSizeChange(cell, true);
@@ -317,9 +317,9 @@ public class DefaultMemStore implements MemStore {
* @return approximate size of the passed key and value.
*/
@Override
- public long delete(Cell deleteCell) {
+ public long delete(ServerCell deleteCell) {
long s = 0;
- Cell toAdd = maybeCloneWithAllocator(deleteCell);
+ ServerCell toAdd = maybeCloneWithAllocator(deleteCell);
s += heapSizeChange(toAdd, addToCellSet(toAdd));
timeRangeTracker.includeTimestamp(toAdd);
this.size.addAndGet(s);
@@ -331,7 +331,7 @@ public class DefaultMemStore implements MemStore {
* first.
* @return Next row or null if none found.
*/
- Cell getNextRow(final Cell cell) {
+ ServerCell getNextRow(final ServerCell cell) {
return getLowest(getNextRow(cell, this.cellSet), getNextRow(cell, this.snapshot));
}
@@ -340,7 +340,7 @@ public class DefaultMemStore implements MemStore {
* @param b
* @return Return lowest of a or b or null if both a and b are null
*/
- private Cell getLowest(final Cell a, final Cell b) {
+ private ServerCell getLowest(final ServerCell a, final ServerCell b) {
if (a == null) {
return b;
}
@@ -356,12 +356,12 @@ public class DefaultMemStore implements MemStore {
* @return Next row or null if none found. If one found, will be a new
* KeyValue -- can be destroyed by subsequent calls to this method.
*/
- private Cell getNextRow(final Cell key,
- final NavigableSet<Cell> set) {
- Cell result = null;
- SortedSet<Cell> tail = key == null? set: set.tailSet(key);
+ private ServerCell getNextRow(final ServerCell key,
+ final NavigableSet<ServerCell> set) {
+ ServerCell result = null;
+ SortedSet<ServerCell> tail = key == null? set: set.tailSet(key);
// Iterate until we fall into the next row; i.e. move off current row
- for (Cell cell: tail) {
+ for (ServerCell cell: tail) {
if (comparator.compareRows(cell, key) <= 0)
continue;
// Note: Not suppressing deletes or expired cells. Needs to be handled
@@ -385,7 +385,7 @@ public class DefaultMemStore implements MemStore {
* @param set
* @param state Accumulates deletes and candidates.
*/
- private void getRowKeyAtOrBefore(final NavigableSet<Cell> set,
+ private void getRowKeyAtOrBefore(final NavigableSet<ServerCell> set,
final GetClosestRowBeforeTracker state) {
if (set.isEmpty()) {
return;
@@ -406,13 +406,13 @@ public class DefaultMemStore implements MemStore {
* @param state
* @return True if we found a candidate walking this row.
*/
- private boolean walkForwardInSingleRow(final SortedSet<Cell> set,
- final Cell firstOnRow, final GetClosestRowBeforeTracker state) {
+ private boolean walkForwardInSingleRow(final SortedSet<ServerCell> set,
+ final ServerCell firstOnRow, final GetClosestRowBeforeTracker state) {
boolean foundCandidate = false;
- SortedSet<Cell> tail = set.tailSet(firstOnRow);
+ SortedSet<ServerCell> tail = set.tailSet(firstOnRow);
if (tail.isEmpty()) return foundCandidate;
- for (Iterator<Cell> i = tail.iterator(); i.hasNext();) {
- Cell kv = i.next();
+ for (Iterator<ServerCell> i = tail.iterator(); i.hasNext();) {
+ ServerCell kv = i.next();
// Did we go beyond the target row? If so break.
if (state.isTooFar(kv, firstOnRow)) break;
if (state.isExpired(kv)) {
@@ -434,9 +434,9 @@ public class DefaultMemStore implements MemStore {
* @param set
* @param state
*/
- private void getRowKeyBefore(NavigableSet<Cell> set,
+ private void getRowKeyBefore(NavigableSet<ServerCell> set,
final GetClosestRowBeforeTracker state) {
- Cell firstOnRow = state.getTargetKey();
+ ServerCell firstOnRow = state.getTargetKey();
for (Member p = memberOfPreviousRow(set, state, firstOnRow);
p != null; p = memberOfPreviousRow(p.set, state, firstOnRow)) {
// Make sure we don't fall out of our table.
@@ -474,11 +474,11 @@ public class DefaultMemStore implements MemStore {
byte[] qualifier,
long newValue,
long now) {
- Cell firstCell = KeyValueUtil.createFirstOnRow(row, family, qualifier);
+ ServerCell firstCell = KeyValueUtil.createFirstOnRow(row, family, qualifier);
// Is there a Cell in 'snapshot' with the same TS? If so, upgrade the timestamp a bit.
- SortedSet<Cell> snSs = snapshot.tailSet(firstCell);
+ SortedSet<ServerCell> snSs = snapshot.tailSet(firstCell);
if (!snSs.isEmpty()) {
- Cell snc = snSs.first();
+ ServerCell snc = snSs.first();
// is there a matching Cell in the snapshot?
if (CellUtil.matchingRow(snc, firstCell) && CellUtil.matchingQualifier(snc, firstCell)) {
if (snc.getTimestamp() == now) {
@@ -494,8 +494,8 @@ public class DefaultMemStore implements MemStore {
// so we cant add the new Cell w/o knowing what's there already, but we also
// want to take this chance to delete some cells. So two loops (sad)
- SortedSet<Cell> ss = cellSet.tailSet(firstCell);
- for (Cell cell : ss) {
+ SortedSet<ServerCell> ss = cellSet.tailSet(firstCell);
+ for (ServerCell cell : ss) {
// if this isnt the row we are interested in, then bail:
if (!CellUtil.matchingColumn(cell, family, qualifier)
|| !CellUtil.matchingRow(cell, firstCell)) {
@@ -511,7 +511,7 @@ public class DefaultMemStore implements MemStore {
// create or update (upsert) a new Cell with
// 'now' and a 0 memstoreTS == immediately visible
- List<Cell> cells = new ArrayList<Cell>(1);
+ List<ServerCell> cells = new ArrayList<ServerCell>(1);
cells.add(new KeyValue(row, family, qualifier, now, Bytes.toBytes(newValue)));
return upsert(cells, 1L);
}
@@ -535,9 +535,9 @@ public class DefaultMemStore implements MemStore {
* @return change in memstore size
*/
@Override
- public long upsert(Iterable<Cell> cells, long readpoint) {
+ public long upsert(Iterable<ServerCell> cells, long readpoint) {
long size = 0;
- for (Cell cell : cells) {
+ for (ServerCell cell : cells) {
size += upsert(cell, readpoint);
}
return size;
@@ -557,7 +557,7 @@ public class DefaultMemStore implements MemStore {
* @param cell
* @return change in size of MemStore
*/
- private long upsert(Cell cell, long readpoint) {
+ private long upsert(ServerCell cell, long readpoint) {
// Add the Cell to the MemStore
// Use the internalAdd method here since we (a) already have a lock
// and (b) cannot safely use the MSLAB here without potentially
@@ -568,16 +568,16 @@ public class DefaultMemStore implements MemStore {
// Get the Cells for the row/family/qualifier regardless of timestamp.
// For this case we want to clean up any other puts
- Cell firstCell = KeyValueUtil.createFirstOnRow(
+ ServerCell firstCell = KeyValueUtil.createFirstOnRow(
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
- SortedSet<Cell> ss = cellSet.tailSet(firstCell);
- Iterator<Cell> it = ss.iterator();
+ SortedSet<ServerCell> ss = cellSet.tailSet(firstCell);
+ Iterator<ServerCell> it = ss.iterator();
// versions visible to oldest scanner
int versionsVisible = 0;
while ( it.hasNext() ) {
- Cell cur = it.next();
+ ServerCell cur = it.next();
if (cell == cur) {
// ignore the one just put in
@@ -615,9 +615,9 @@ public class DefaultMemStore implements MemStore {
* found in. Include set because it is carrying context.
*/
private static class Member {
- final Cell cell;
- final NavigableSet<Cell> set;
- Member(final NavigableSet<Cell> s, final Cell kv) {
+ final ServerCell cell;
+ final NavigableSet<ServerCell> set;
+ Member(final NavigableSet<ServerCell> s, final ServerCell kv) {
this.cell = kv;
this.set = s;
}
@@ -631,12 +631,12 @@ public class DefaultMemStore implements MemStore {
* member in.
* @return Null or member of row previous to firstOnRow
*/
- private Member memberOfPreviousRow(NavigableSet<Cell> set,
- final GetClosestRowBeforeTracker state, final Cell firstOnRow) {
- NavigableSet<Cell> head = set.headSet(firstOnRow, false);
+ private Member memberOfPreviousRow(NavigableSet<ServerCell> set,
+ final GetClosestRowBeforeTracker state, final ServerCell firstOnRow) {
+ NavigableSet<ServerCell> head = set.headSet(firstOnRow, false);
if (head.isEmpty()) return null;
- for (Iterator<Cell> i = head.descendingIterator(); i.hasNext();) {
- Cell found = i.next();
+ for (Iterator<ServerCell> i = head.descendingIterator(); i.hasNext();) {
+ ServerCell found = i.next();
if (state.isExpired(found)) {
i.remove();
continue;
@@ -675,23 +675,23 @@ public class DefaultMemStore implements MemStore {
*/
protected class MemStoreScanner extends NonLazyKeyValueScanner {
// Next row information for either cellSet or snapshot
- private Cell cellSetNextRow = null;
- private Cell snapshotNextRow = null;
+ private ServerCell cellSetNextRow = null;
+ private ServerCell snapshotNextRow = null;
// last iterated Cells for cellSet and snapshot (to restore iterator state after reseek)
- private Cell cellSetItRow = null;
- private Cell snapshotItRow = null;
+ private ServerCell cellSetItRow = null;
+ private ServerCell snapshotItRow = null;
// iterator based scanning.
- private Iterator<Cell> cellSetIt;
- private Iterator<Cell> snapshotIt;
+ private Iterator<ServerCell> cellSetIt;
+ private Iterator<ServerCell> snapshotIt;
// The cellSet and snapshot at the time of creating this scanner
private CellSkipListSet cellSetAtCreation;
private CellSkipListSet snapshotAtCreation;
// the pre-calculated Cell to be returned by peek() or next()
- private Cell theNext;
+ private ServerCell theNext;
// The allocator and snapshot allocator at the time of creating this scanner
volatile MemStoreLAB allocatorAtCreation;
@@ -748,9 +748,9 @@ public class DefaultMemStore implements MemStore {
* @param it
* @return Next Cell
*/
- private Cell getNext(Iterator<Cell> it) {
- Cell startCell = theNext;
- Cell v = null;
+ private ServerCell getNext(Iterator<ServerCell> it) {
+ ServerCell startCell = theNext;
+ ServerCell v = null;
try {
while (it.hasNext()) {
v = it.next();
@@ -784,7 +784,7 @@ public class DefaultMemStore implements MemStore {
* @return false if the key is null or if there is no data
*/
@Override
- public synchronized boolean seek(Cell key) {
+ public synchronized boolean seek(ServerCell key) {
if (key == null) {
close();
return false;
@@ -803,7 +803,7 @@ public class DefaultMemStore implements MemStore {
/**
* (Re)initialize the iterators after a seek or a reseek.
*/
- private synchronized boolean seekInSubLists(Cell key){
+ private synchronized boolean seekInSubLists(ServerCell key){
cellSetNextRow = getNext(cellSetIt);
snapshotNextRow = getNext(snapshotIt);
@@ -821,7 +821,7 @@ public class DefaultMemStore implements MemStore {
* @return true if there is at least one KV to read, false otherwise
*/
@Override
- public synchronized boolean reseek(Cell key) {
+ public synchronized boolean reseek(ServerCell key) {
/*
See HBASE-4195 & HBASE-3855 & HBASE-6591 for the background on this implementation.
This code is executed concurrently with flush and puts, without locks.
@@ -842,18 +842,18 @@ public class DefaultMemStore implements MemStore {
@Override
- public synchronized Cell peek() {
+ public synchronized ServerCell peek() {
//DebugPrint.println(" MS@" + hashCode() + " peek = " + getLowest());
return theNext;
}
@Override
- public synchronized Cell next() {
+ public synchronized ServerCell next() {
if (theNext == null) {
return null;
}
- final Cell ret = theNext;
+ final ServerCell ret = theNext;
// Advance one of the iterators
if (theNext == cellSetNextRow) {
@@ -876,7 +876,7 @@ public class DefaultMemStore implements MemStore {
* This uses comparator.compare() to compare the KeyValue using the memstore
* comparator.
*/
- private Cell getLowest(Cell first, Cell second) {
+ private ServerCell getLowest(ServerCell first, ServerCell second) {
if (first == null && second == null) {
return null;
}
@@ -892,7 +892,7 @@ public class DefaultMemStore implements MemStore {
* This uses comparator.compare() to compare the Cell using the memstore
* comparator.
*/
- private Cell getHighest(Cell first, Cell second) {
+ private ServerCell getHighest(ServerCell first, ServerCell second) {
if (first == null && second == null) {
return null;
}
@@ -944,7 +944,7 @@ public class DefaultMemStore implements MemStore {
* the scanner to the previous row of given key
*/
@Override
- public synchronized boolean backwardSeek(Cell key) {
+ public synchronized boolean backwardSeek(ServerCell key) {
seek(key);
if (peek() == null || comparator.compareRows(peek(), key) > 0) {
return seekToPreviousRow(key);
@@ -958,21 +958,21 @@ public class DefaultMemStore implements MemStore {
* specified key, then seek to the first KeyValue of previous row
*/
@Override
- public synchronized boolean seekToPreviousRow(Cell key) {
- Cell firstKeyOnRow = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(),
+ public synchronized boolean seekToPreviousRow(ServerCell key) {
+ ServerCell firstKeyOnRow = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(),
key.getRowLength());
- SortedSet<Cell> cellHead = cellSetAtCreation.headSet(firstKeyOnRow);
- Cell cellSetBeforeRow = cellHead.isEmpty() ? null : cellHead.last();
- SortedSet<Cell> snapshotHead = snapshotAtCreation
+ SortedSet<ServerCell> cellHead = cellSetAtCreation.headSet(firstKeyOnRow);
+ ServerCell cellSetBeforeRow = cellHead.isEmpty() ? null : cellHead.last();
+ SortedSet<ServerCell> snapshotHead = snapshotAtCreation
.headSet(firstKeyOnRow);
- Cell snapshotBeforeRow = snapshotHead.isEmpty() ? null : snapshotHead
+ ServerCell snapshotBeforeRow = snapshotHead.isEmpty() ? null : snapshotHead
.last();
- Cell lastCellBeforeRow = getHighest(cellSetBeforeRow, snapshotBeforeRow);
+ ServerCell lastCellBeforeRow = getHighest(cellSetBeforeRow, snapshotBeforeRow);
if (lastCellBeforeRow == null) {
theNext = null;
return false;
}
- Cell firstKeyOnPreviousRow = KeyValueUtil.createFirstOnRow(lastCellBeforeRow.getRowArray(),
+ ServerCell firstKeyOnPreviousRow = KeyValueUtil.createFirstOnRow(lastCellBeforeRow.getRowArray(),
lastCellBeforeRow.getRowOffset(), lastCellBeforeRow.getRowLength());
this.stopSkippingCellsIfNextRow = true;
seek(firstKeyOnPreviousRow);
@@ -986,15 +986,15 @@ public class DefaultMemStore implements MemStore {
@Override
public synchronized boolean seekToLastRow() {
- Cell first = cellSetAtCreation.isEmpty() ? null : cellSetAtCreation
+ ServerCell first = cellSetAtCreation.isEmpty() ? null : cellSetAtCreation
.last();
- Cell second = snapshotAtCreation.isEmpty() ? null
+ ServerCell second = snapshotAtCreation.isEmpty() ? null
: snapshotAtCreation.last();
- Cell higherCell = getHighest(first, second);
+ ServerCell higherCell = getHighest(first, second);
if (higherCell == null) {
return false;
}
- Cell firstCellOnLastRow = KeyValueUtil.createFirstOnRow(higherCell.getRowArray(),
+ ServerCell firstCellOnLastRow = KeyValueUtil.createFirstOnRow(higherCell.getRowArray(),
higherCell.getRowOffset(), higherCell.getRowLength());
if (seek(firstCellOnLastRow)) {
return true;
@@ -1019,7 +1019,7 @@ public class DefaultMemStore implements MemStore {
* @param notpresent True if the cell was NOT present in the set.
* @return Size
*/
- static long heapSizeChange(final Cell cell, final boolean notpresent) {
+ static long heapSizeChange(final ServerCell cell, final boolean notpresent) {
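+ // Charges the skip-list entry overhead plus the cell's estimated heap
+ // footprint; re-adding a cell that was already present costs nothing.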
return notpresent ? ClassSize.align(ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY
+ CellUtil.estimatedHeapSizeOf(cell)) : 0;
}
@@ -1063,19 +1063,19 @@ public class DefaultMemStore implements MemStore {
byte [] empty = new byte[0];
for (int i = 0; i < count; i++) {
// Give each its own ts
- Pair<Long, Cell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
+ Pair<Long, ServerCell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
size += ret.getFirst();
}
LOG.info("memstore1 estimated size=" + size);
for (int i = 0; i < count; i++) {
- Pair<Long, Cell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
+ Pair<Long, ServerCell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
size += ret.getFirst();
}
LOG.info("memstore1 estimated size (2nd loading of same data)=" + size);
// Make a variably sized memstore.
DefaultMemStore memstore2 = new DefaultMemStore();
for (int i = 0; i < count; i++) {
- Pair<Long, Cell> ret = memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i,
+ Pair<Long, ServerCell> ret = memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i,
new byte[i]));
size += ret.getFirst();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
index 8305b99..cfc2e91 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
@@ -28,9 +28,9 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
@@ -110,7 +110,7 @@ class DefaultStoreFileManager implements StoreFileManager {
@Override
public Iterator<StoreFile> updateCandidateFilesForRowKeyBefore(
- Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final Cell candidate) {
+ Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final ServerCell candidate) {
// Default store has nothing useful to do here.
// TODO: move this comment when implementing Level:
// Level store can trim the list by range, removing all the files which cannot have
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
index 70254fe..bffd104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
@@ -18,8 +18,8 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
/**
* This interface is used for the tracking and enforcement of Deletes
@@ -40,7 +40,7 @@ public interface DeleteTracker {
* This is called when a Delete is encountered in a StoreFile.
* @param cell - the delete cell
*/
- void add(Cell cell);
+ void add(ServerCell cell);
/**
* Check if the specified cell buffer has been deleted by a previously
@@ -48,7 +48,7 @@ public interface DeleteTracker {
* @param cell - current cell to check if deleted by a previously seen delete
* @return deleteResult The result tells whether the KeyValue is deleted and why
*/
- DeleteResult isDeleted(Cell cell);
+ DeleteResult isDeleted(ServerCell cell);
/**
* @return true if there are no current delete, false otherwise
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
index 4d22c0e..68bee57 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
@@ -24,12 +24,12 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -44,7 +44,7 @@ class GetClosestRowBeforeTracker {
// Any cell w/ a ts older than this is expired.
private final long now;
private final long oldestUnexpiredTs;
- private Cell candidate = null;
+ private ServerCell candidate = null;
private final KVComparator kvcomparator;
// Flag for whether we're doing getclosest on a metaregion.
private final boolean metaregion;
@@ -53,7 +53,7 @@ class GetClosestRowBeforeTracker {
private final int tablenamePlusDelimiterLength;
// Deletes keyed by row. Comparator compares on row portion of KeyValue only.
- private final NavigableMap<Cell, NavigableSet<Cell>> deletes;
+ private final NavigableMap<ServerCell, NavigableSet<ServerCell>> deletes;
/**
* @param c
@@ -79,17 +79,17 @@ class GetClosestRowBeforeTracker {
this.now = System.currentTimeMillis();
this.oldestUnexpiredTs = now - ttl;
this.kvcomparator = c;
- this.deletes = new TreeMap<Cell, NavigableSet<Cell>>(new CellComparator.RowComparator());
+ this.deletes = new TreeMap<ServerCell, NavigableSet<ServerCell>>(new CellComparator.RowComparator());
}
/*
* Add the specified KeyValue to the list of deletes.
* @param kv
*/
- private void addDelete(final Cell kv) {
- NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
+ private void addDelete(final ServerCell kv) {
+ NavigableSet<ServerCell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null) {
- rowdeletes = new TreeSet<Cell>(this.kvcomparator);
+ rowdeletes = new TreeSet<ServerCell>(this.kvcomparator);
this.deletes.put(kv, rowdeletes);
}
rowdeletes.add(kv);
@@ -99,7 +99,7 @@ class GetClosestRowBeforeTracker {
* @param kv Adds candidate if nearer the target than previous candidate.
* @return True if updated candidate.
*/
- private boolean addCandidate(final Cell kv) {
+ private boolean addCandidate(final ServerCell kv) {
if (!isDeleted(kv) && isBetterCandidate(kv)) {
this.candidate = kv;
return true;
@@ -107,7 +107,7 @@ class GetClosestRowBeforeTracker {
return false;
}
- boolean isBetterCandidate(final Cell contender) {
+ boolean isBetterCandidate(final ServerCell contender) {
return this.candidate == null ||
(this.kvcomparator.compareRows(this.candidate, contender) < 0 &&
this.kvcomparator.compareRows(contender, this.targetkey) <= 0);
@@ -119,9 +119,9 @@ class GetClosestRowBeforeTracker {
* @param kv
* @return true is the specified KeyValue is deleted, false if not
*/
- private boolean isDeleted(final Cell kv) {
+ private boolean isDeleted(final ServerCell kv) {
if (this.deletes.isEmpty()) return false;
- NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
+ NavigableSet<ServerCell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null || rowdeletes.isEmpty()) return false;
return isDeleted(kv, rowdeletes);
}
@@ -133,9 +133,9 @@ class GetClosestRowBeforeTracker {
* @param ds
* @return True is the specified KeyValue is deleted, false if not
*/
- public boolean isDeleted(final Cell kv, final NavigableSet<Cell> ds) {
+ public boolean isDeleted(final ServerCell kv, final NavigableSet<ServerCell> ds) {
if (deletes == null || deletes.isEmpty()) return false;
- for (Cell d: ds) {
+ for (ServerCell d: ds) {
long kvts = kv.getTimestamp();
long dts = d.getTimestamp();
if (CellUtil.isDeleteFamily(d)) {
@@ -170,7 +170,7 @@ class GetClosestRowBeforeTracker {
* @param cell
* @return true if the cell is expired
*/
- public boolean isExpired(final Cell cell) {
+ public boolean isExpired(final ServerCell cell) {
return cell.getTimestamp() < this.oldestUnexpiredTs ||
HStore.isCellTTLExpired(cell, this.oldestUnexpiredTs, this.now);
}
@@ -183,7 +183,7 @@ class GetClosestRowBeforeTracker {
* @param kv
* @return True if we removed k from candidates.
*/
- boolean handleDeletes(final Cell kv) {
+ boolean handleDeletes(final ServerCell kv) {
addDelete(kv);
boolean deleted = false;
if (!hasCandidate()) return deleted;
@@ -199,7 +199,7 @@ class GetClosestRowBeforeTracker {
* @param kv
* @return True if we added a candidate
*/
- boolean handle(final Cell kv) {
+ boolean handle(final ServerCell kv) {
if (CellUtil.isDelete(kv)) {
handleDeletes(kv);
return false;
@@ -217,7 +217,7 @@ class GetClosestRowBeforeTracker {
/**
* @return Best candidate or null.
*/
- public Cell getCandidate() {
+ public ServerCell getCandidate() {
return this.candidate;
}
@@ -230,11 +230,11 @@ class GetClosestRowBeforeTracker {
* @param firstOnRow on row kv.
* @return True if we went too far, past the target key.
*/
- boolean isTooFar(final Cell kv, final Cell firstOnRow) {
+ boolean isTooFar(final ServerCell kv, final ServerCell firstOnRow) {
return this.kvcomparator.compareRows(kv, firstOnRow) > 0;
}
- boolean isTargetTable(final Cell kv) {
+ boolean isTargetTable(final ServerCell kv) {
if (!metaregion) return true;
// Compare start of keys row. Compare including delimiter. Saves having
// to calculate where tablename ends in the candidate kv.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 83127d2..fe03bd3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -86,6 +86,7 @@ import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
+import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
@@ -2552,7 +2553,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
void updateDeleteLatestVersionTimeStamp(Cell cell, Get get, int count, byte[] byteNow)
throws IOException {
- List<Cell> result = get(get, false);
+ List<ServerCell> result = get(get, false);
if (result.size() < count) {
// Nothing to delete
@@ -3050,7 +3051,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
WALEdit fromCP = batchOp.walEditsFromCoprocessors[i];
if (fromCP != null) {
for (Cell cell : fromCP.getCells()) {
- walEdit.add(cell);
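+ // WALEdit is now ServerCell-typed; cells emitted by coprocessors are
+ // assumed to be ServerCell implementations such as KeyValue.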
+ walEdit.add((ServerCell) cell);
}
}
addFamilyMapToWALEdit(familyMaps[i], walEdit);
@@ -3255,7 +3256,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
return processed;
}
}
- List<Cell> result = get(get, false);
+ List<ServerCell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
@@ -3335,7 +3336,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// wait for all previous transactions to complete (with lock held)
mvcc.waitForPreviousTransactionsComplete();
try {
- List<Cell> result = get(get, false);
+ List<ServerCell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
@@ -3577,7 +3578,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
CellUtil.setSequenceId(cell, mvccNum);
- Pair<Long, Cell> ret = store.add(cell);
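+ // Store.add() now takes a ServerCell; cells reaching here are KeyValues,
+ // which are assumed to implement ServerCell in this patch.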
+ Pair<Long, ServerCell> ret = store.add((ServerCell)cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
if(isInReplay) {
@@ -3601,7 +3602,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
for (Cell cell : memstoreCells) {
byte[] family = CellUtil.cloneFamily(cell);
Store store = getStore(family);
- store.rollback(cell);
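+ // Cells in memstoreCells came back from Store.add(), so they are assumed
+ // to already be ServerCells; the cast only narrows the static type.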
+ store.rollback((ServerCell)cell);
kvsRolledback++;
}
LOG.debug("rollbackMemstore rolled back " + kvsRolledback);
@@ -3673,7 +3674,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
int listSize = edits.size();
for (int i=0; i < listSize; i++) {
Cell cell = edits.get(i);
- walEdit.add(cell);
+ walEdit.add((ServerCell) cell);
}
}
}
@@ -4812,7 +4813,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @return True if we should flush.
*/
protected boolean restoreEdit(final Store s, final Cell cell) {
- long kvSize = s.add(cell).getFirst();
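+ // Edits replayed from the WAL are KeyValues, assumed to implement ServerCell.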
+ long kvSize = s.add((ServerCell) cell).getFirst();
if (this.rsAccounting != null) {
rsAccounting.addAndGetRegionReplayEditsSize(getRegionInfo().getRegionName(), kvSize);
}
@@ -5171,7 +5172,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
/**
* If the joined heap data gathering is interrupted due to scan limits, this will
* contain the row for which we are populating the values.*/
- protected Cell joinedContinuationRow = null;
+ protected ServerCell joinedContinuationRow = null;
protected final byte[] stopRow;
private final FilterWrapper filter;
private int batch;
@@ -5273,19 +5274,19 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
}
@Override
- public NextState next(List<Cell> outResults)
+ public NextState next(List<ServerCell> outResults)
throws IOException {
// apply the batching limit by default
return next(outResults, batch);
}
@Override
- public NextState next(List<Cell> outResults, int limit) throws IOException {
+ public NextState next(List<ServerCell> outResults, int limit) throws IOException {
return next(outResults, limit, -1);
}
@Override
- public synchronized NextState next(List<Cell> outResults, int limit, long remainingResultSize)
+ public synchronized NextState next(List<ServerCell> outResults, int limit, long remainingResultSize)
throws IOException {
if (this.filterClosed) {
throw new UnknownScannerException("Scanner was closed (timed out?) " +
@@ -5302,18 +5303,18 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
}
@Override
- public NextState nextRaw(List<Cell> outResults) throws IOException {
+ public NextState nextRaw(List<ServerCell> outResults) throws IOException {
return nextRaw(outResults, batch);
}
@Override
- public NextState nextRaw(List<Cell> outResults, int limit)
+ public NextState nextRaw(List<ServerCell> outResults, int limit)
throws IOException {
return nextRaw(outResults, limit, -1);
}
@Override
- public NextState nextRaw(List<Cell> outResults, int batchLimit, long remainingResultSize)
+ public NextState nextRaw(List<ServerCell> outResults, int batchLimit, long remainingResultSize)
throws IOException {
if (storeHeap == null) {
// scanner is closed
@@ -5325,7 +5326,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// to handle scan or get operation.
state = nextInternal(outResults, batchLimit, remainingResultSize);
} else {
- List<Cell> tmpList = new ArrayList<Cell>();
+ List<ServerCell> tmpList = new ArrayList<ServerCell>();
state = nextInternal(tmpList, batchLimit, remainingResultSize);
outResults.addAll(tmpList);
}
@@ -5350,7 +5351,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @return the state the joinedHeap returned on the call to
* {@link KeyValueHeap#next(List, int, long)}
*/
- private NextState populateFromJoinedHeap(List<Cell> results, int limit, long resultSize)
+ private NextState populateFromJoinedHeap(List<ServerCell> results, int limit, long resultSize)
throws IOException {
assert joinedContinuationRow != null;
NextState state =
@@ -5379,12 +5380,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @param length length for currentRow
* @return state of last call to {@link KeyValueHeap#next()}
*/
- private NextState populateResult(List<Cell> results, KeyValueHeap heap, int batchLimit,
+ private NextState populateResult(List<ServerCell> results, KeyValueHeap heap, int batchLimit,
long remainingResultSize, byte[] currentRow, int offset, short length) throws IOException {
- Cell nextKv;
+ ServerCell nextKv;
boolean moreCellsInRow = false;
long accumulatedResultSize = 0;
- List<Cell> tmpResults = new ArrayList<Cell>();
+ List<ServerCell> tmpResults = new ArrayList<ServerCell>();
do {
int remainingBatchLimit = batchLimit - results.size();
NextState heapState =
@@ -5423,7 +5424,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @param length
* @return true When there are more cells in the row to be read
*/
- private boolean moreCellsInRow(final Cell nextKv, byte[] currentRow, int offset,
+ private boolean moreCellsInRow(final ServerCell nextKv, byte[] currentRow, int offset,
short length) {
return nextKv != null && CellUtil.matchingRow(nextKv, currentRow, offset, length);
}
@@ -5436,7 +5437,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* @param state The state returned from the scanner that generated these results
* @return aggregate size of results
*/
- private long calculateResultSize(List<Cell> results, NextState state) {
+ private long calculateResultSize(List<ServerCell> results, NextState state) {
if (results == null || results.isEmpty()) return 0;
// In general, the state should contain the estimate because the result size used to
@@ -5445,7 +5446,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
if (state != null && state.hasResultSizeEstimate()) return state.getResultSize();
long size = 0;
- for (Cell c : results) {
+ for (ServerCell c : results) {
size += CellUtil.estimatedHeapSizeOfWithoutTags(c);
}
@@ -5464,7 +5465,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
return this.filter != null && this.filter.filterAllRemaining();
}
- private NextState nextInternal(List<Cell> results, int batchLimit, long remainingResultSize)
+ private NextState nextInternal(List<ServerCell> results, int batchLimit, long remainingResultSize)
throws IOException {
if (!results.isEmpty()) {
throw new IllegalArgumentException("First parameter should be an empty list");
@@ -5493,7 +5494,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
}
// Let's see what we have in the storeHeap.
- Cell current = this.storeHeap.peek();
+ ServerCell current = this.storeHeap.peek();
byte[] currentRow = null;
int offset = 0;
@@ -5570,7 +5571,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// We hit the size limit.
return NextState.makeState(NextState.State.SIZE_LIMIT_REACHED, resultSize);
}
- Cell nextKv = this.storeHeap.peek();
+ ServerCell nextKv = this.storeHeap.peek();
stopRow = nextKv == null ||
isStopRow(nextKv.getRowArray(), nextKv.getRowOffset(), nextKv.getRowLength());
// save that the row was empty before filters applied to it.
@@ -5599,7 +5600,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// These values are not needed for filter to work, so we postpone their
// fetch to (possibly) reduce amount of data loads from disk.
if (this.joinedHeap != null) {
- Cell nextJoinedKv = joinedHeap.peek();
+ ServerCell nextJoinedKv = joinedHeap.peek();
// If joinedHeap is pointing to some other row, try to seek to a correct one.
boolean mayHaveData = (nextJoinedKv != null && CellUtil.matchingRow(nextJoinedKv,
currentRow, offset, length))
@@ -5675,7 +5676,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
protected boolean nextRow(byte [] currentRow, int offset, short length) throws IOException {
assert this.joinedContinuationRow == null: "Trying to go to next row during joinedHeap read.";
- Cell next;
+ ServerCell next;
while ((next = this.storeHeap.peek()) != null &&
CellUtil.matchingRow(next, currentRow, offset, length)) {
this.storeHeap.next(MOCKED_LIST);
@@ -6292,15 +6293,15 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
get.addFamily(family);
}
}
- List<Cell> results = get(get, true);
+ List<ServerCell> results = get(get, true);
boolean stale = this.getRegionInfo().getReplicaId() != 0;
return Result.create(results, get.isCheckExistenceOnly() ? !results.isEmpty() : null, stale);
}
@Override
- public List<Cell> get(Get get, boolean withCoprocessor) throws IOException {
+ public List<ServerCell> get(Get get, boolean withCoprocessor) throws IOException {
- List<Cell> results = new ArrayList<Cell>();
+ List<ServerCell> results = new ArrayList<ServerCell>();
// pre-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
@@ -6328,7 +6329,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// do after lock
if (this.metricsRegion != null) {
long totalSize = 0L;
- for (Cell cell : results) {
+ for (ServerCell cell : results) {
totalSize += CellUtil.estimatedSerializedSizeOf(cell);
}
this.metricsRegion.updateGet(totalSize);
@@ -6478,7 +6479,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
checkFamily(CellUtil.cloneFamily(cell));
// unreachable
}
- Pair<Long, Cell> ret = store.add(cell);
+ Pair<Long, ServerCell> ret = store.add((ServerCell) cell);