diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index b6600a9..c599f06 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -138,19 +138,19 @@ public class Result implements CellScannable, CellScanner {
    * <br><strong>Note:</strong> You must ensure that the keyvalues are already sorted.
    * @param cells List of cells
    */
-  public static Result create(List<Cell> cells) {
+  public static Result create(List<? extends Cell> cells) {
     return create(cells, null);
   }
 
-  public static Result create(List<Cell> cells, Boolean exists) {
+  public static Result create(List<? extends Cell> cells, Boolean exists) {
     return create(cells, exists, false);
   }
 
-  public static Result create(List<Cell> cells, Boolean exists, boolean stale) {
+  public static Result create(List<? extends Cell> cells, Boolean exists, boolean stale) {
     return create(cells, exists, stale, false);
   }
 
-  public static Result create(List<Cell> cells, Boolean exists, boolean stale, boolean partial) {
+  public static Result create(List<? extends Cell> cells, Boolean exists, boolean stale, boolean partial) {
     if (exists != null){
       return new Result(null, exists, stale, partial);
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index 97724bd..f791b51 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -24,6 +24,7 @@ import java.math.RoundingMode;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg;
@@ -51,6 +52,20 @@ public class BigDecimalColumnInterpreter extends ColumnInterpreter {
 
   @Override
-  public Double getValue(byte[] colFamily, byte[] colQualifier, Cell c)
+  public Double getValue(byte[] colFamily, byte[] colQualifier, Cell c) throws IOException {
+    return null;
+  }
+
+  @Override
+  public Double getValue(byte[] colFamily, byte[] colQualifier, ServerCell c)
       throws IOException {
     if (c == null || c.getValueLength() != Bytes.SIZEOF_DOUBLE)
       return null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index e8e5e3a..70d2938 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client.coprocessor;
 import java.io.IOException;
 
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
@@ -45,6 +46,13 @@ public class LongColumnInterpreter extends ColumnInterpreter {
       throws IOException;
 
   /**
+   *
+   * @param colFamily
+   * @param colQualifier
+   * @param c
+   * @return value of type T
+   * @throws IOException
+   */
+  public abstract T getValue(byte[] colFamily, byte[] colQualifier, ServerCell c)
+      throws IOException;
+
+  /**
    * @param l1
    * @param l2
    * @return sum or non null value among (if either of them is null); otherwise
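A side note on the interpreter hunks above: the Cell overload becomes a stub while the real decoding moves to the new ServerCell overload (the abstract T getValue(byte[], byte[], ServerCell) added alongside it). Below is a minimal sketch of a concrete interpreter under this two-overload contract; the class name is hypothetical and the ColumnInterpreter/protobuf plumbing is omitted:

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.util.Bytes;

public class LongValueDecoder {

  // Client-facing overload: in the patch this variant is stubbed out.
  public Long getValue(byte[] colFamily, byte[] colQualifier, Cell c) throws IOException {
    return null;
  }

  // Server-side overload: a ServerCell may be ByteBuffer backed, so gate the
  // byte[] accessors on hasArray(). The buffer-based getters are still a TODO
  // in this patch.
  public Long getValue(byte[] colFamily, byte[] colQualifier, ServerCell c) throws IOException {
    if (c == null || !c.hasArray() || c.getValueLength() != Bytes.SIZEOF_LONG) {
      return null;
    }
    return Bytes.toLong(c.getValueArray(), c.getValueOffset());
  }
}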
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 572de9f..fd3011c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -57,7 +57,7 @@ public class ColumnCountGetFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     this.count++;
     return filterAllRemaining() ? ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 673ca6e..6c0ef68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -105,7 +105,7 @@
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v)
+  public ReturnCode filterKeyValue(ServerCell v)
   {
     if (columnOffset != null) {
       if (count >= limit) {
@@ -144,7 +144,7 @@
   }
 
   @Override
-  public Cell getNextCellHint(Cell cell) {
+  public ServerCell getNextCellHint(ServerCell cell) {
     return KeyValueUtil.createFirstOnRow(
       cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
       cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
       columnOffset, 0, columnOffset.length);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index d2f058a..d56ded3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,7 +52,7 @@ public class ColumnPrefixFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell kv) {
+  public ReturnCode filterKeyValue(ServerCell kv) {
     if (this.prefix == null || kv.getQualifierArray() == null) {
       return ReturnCode.INCLUDE;
     } else {
@@ -130,7 +130,7 @@ public class ColumnPrefixFilter extends FilterBase {
   }
 
   @Override
-  public Cell getNextCellHint(Cell cell) {
+  public ServerCell getNextCellHint(ServerCell cell) {
     return KeyValueUtil.createFirstOnRow(
       cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
       cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
       prefix, 0, prefix.length);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index d8ea094..e0f1fd7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -23,8 +23,8 @@ import static org.apache.hadoop.hbase.util.Bytes.len;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -116,7 +116,7 @@ public class ColumnRangeFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell kv) {
+  public ReturnCode filterKeyValue(ServerCell kv) {
     // TODO have a column compare method in Cell
     byte[] buffer = kv.getQualifierArray();
     int qualifierOffset = kv.getQualifierOffset();
@@ -216,7 +216,7 @@ public class ColumnRangeFilter extends FilterBase {
   }
 
   @Override
-  public Cell getNextCellHint(Cell cell) {
+  public ServerCell getNextCellHint(ServerCell cell) {
     return KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(), cell
         .getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell
         .getFamilyLength(), this.minColumn, 0, len(this.minColumn));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 6d19842..731fa65 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -25,8 +25,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -134,7 +134,7 @@ public class DependentColumnFilter extends CompareFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell c) {
+  public ReturnCode filterKeyValue(ServerCell c) {
     // Check if the column and qualifier match
     if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
       // include non-matches for the time being, they'll be discarded afterwards
@@ -154,12 +154,12 @@ public class DependentColumnFilter extends CompareFilter {
   }
 
   @Override
-  public void filterRowCells(List<Cell> kvs) {
-    Iterator<Cell> it = kvs.iterator();
-    Cell kv;
+  public void filterRowCells(List<ServerCell> cells) {
+    Iterator<ServerCell> it = cells.iterator();
+    ServerCell cell;
     while(it.hasNext()) {
-      kv = it.next();
-      if(!stampSet.contains(kv.getTimestamp())) {
+      cell = it.next();
+      if(!stampSet.contains(cell.getTimestamp())) {
         it.remove();
       }
     }
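The column filters above all answer getNextCellHint with a "first on row" key so the scanner can seek straight to the wanted qualifier instead of filtering cell by cell. A stand-alone sketch of that hint construction (helper and prefix argument are illustrative, not part of the patch):

import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.ServerCell;

public final class HintExample {
  // 'prefix' is a hypothetical qualifier prefix we want the scanner to jump to.
  static ServerCell firstCellWithQualifier(ServerCell current, byte[] prefix) {
    // KeyValue implements ServerCell after this patch, so the created hint
    // satisfies the new return type.
    return KeyValueUtil.createFirstOnRow(
        current.getRowArray(), current.getRowOffset(), current.getRowLength(),
        current.getFamilyArray(), current.getFamilyOffset(), current.getFamilyLength(),
        prefix, 0, prefix.length);
  }
}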
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index e79a4d5..1edf6bc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -60,7 +60,7 @@ public class FamilyFilter extends CompareFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     int familyLength = v.getFamilyLength();
     if (familyLength > 0) {
       if (doCompare(this.compareOp, this.comparator, v.getFamilyArray(),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 88bf842..d26ade1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -113,7 +113,7 @@
    * @throws IOException in case an I/O or an filter specific failure needs to be signaled.
    * @see Filter.ReturnCode
    */
-  abstract public ReturnCode filterKeyValue(final Cell v) throws IOException;
+  abstract public ReturnCode filterKeyValue(final ServerCell v) throws IOException;
 
   /**
    * Give the filter a chance to transform the passed KeyValue. If the Cell is changed a new
@@ -132,7 +132,7 @@
    * @return the changed KeyValue
    * @throws IOException in case an I/O or an filter specific failure needs to be signaled.
    */
-  abstract public Cell transformCell(final Cell v) throws IOException;
+  abstract public ServerCell transformCell(final ServerCell v) throws IOException;
 
   /**
    * Return codes for filterValue().
@@ -176,7 +176,7 @@
    * @param kvs the list of Cells to be filtered
    * @throws IOException in case an I/O or an filter specific failure needs to be signaled.
    */
-  abstract public void filterRowCells(List<Cell> kvs) throws IOException;
+  abstract public void filterRowCells(List<ServerCell> kvs) throws IOException;
 
   /**
    * Primarily used to check for conflicts with scans(such as scans that do not read a full row at a
@@ -211,7 +211,7 @@
    *         seek to next.
    * @throws IOException in case an I/O or an filter specific failure needs to be signaled.
    */
-  abstract public Cell getNextCellHint(final Cell currentCell) throws IOException;
+  abstract public ServerCell getNextCellHint(final ServerCell currentCell) throws IOException;
 
   /**
    * Check that given column family is essential for filter to check row. Most filters always return
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index a04dd89..941cfc8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
@@ -75,7 +75,7 @@ public abstract class FilterBase extends Filter {
    * @inheritDoc
    */
   @Override
-  public Cell transformCell(Cell v) throws IOException {
+  public ServerCell transformCell(ServerCell v) throws IOException {
     return v;
   }
 
@@ -86,7 +86,7 @@
    * @inheritDoc
    */
   @Override
-  public void filterRowCells(List<Cell> ignored) throws IOException {
+  public void filterRowCells(List<ServerCell> ignored) throws IOException {
   }
 
   /**
@@ -118,7 +118,7 @@
    *
    * @inheritDoc
    */
-  public Cell getNextCellHint(Cell currentCell) throws IOException {
+  public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
     return null;
   }
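With Filter and FilterBase reworked, a user-defined filter overrides the ServerCell signatures but is otherwise written exactly as before. A minimal sketch (hypothetical filter; the toByteArray/parseFrom serialization plumbing is omitted):

import java.io.IOException;

import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.filter.FilterBase;

public class EvenTimestampFilter extends FilterBase {
  @Override
  public ReturnCode filterKeyValue(ServerCell v) throws IOException {
    // Keep cells with an even timestamp, skip the rest; FilterBase supplies
    // no-op defaults for the remaining hooks.
    return (v.getTimestamp() % 2 == 0) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
  }
}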
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index ba1a818..c490922 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -23,9 +23,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -70,7 +70,7 @@ final public class FilterList extends Filter {
   private Filter seekHintFilter = null;
 
   /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
-  private Cell referenceCell = null;
+  private ServerCell referenceCell = null;
 
   /**
    * When filtering a given Cell in {@link #filterKeyValue(Cell)},
@@ -79,7 +79,7 @@
    * Individual filters transformation are applied only when the filter includes the Cell.
    * Transformations are composed in the order specified by {@link #filters}.
    */
-  private Cell transformedCell = null;
+  private ServerCell transformedCell = null;
 
   /**
    * Constructor that takes a set of {@link Filter}s. The default operator
@@ -215,7 +215,7 @@ final public class FilterList extends Filter {
   }
 
   @Override
-  public Cell transformCell(Cell c) throws IOException {
+  public ServerCell transformCell(ServerCell c) throws IOException {
     if (!CellComparator.equals(c, referenceCell)) {
       throw new IllegalStateException("Reference Cell: " + this.referenceCell + " does not match: "
           + c);
@@ -226,11 +226,11 @@
   @Override
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
     justification="Intentional")
-  public ReturnCode filterKeyValue(Cell c) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell c) throws IOException {
     this.referenceCell = c;
 
     // Accumulates successive transformation of every filter that includes the Cell:
-    Cell transformed = c;
+    ServerCell transformed = c;
 
     ReturnCode rc = operator == Operator.MUST_PASS_ONE?
         ReturnCode.SKIP: ReturnCode.INCLUDE;
@@ -299,7 +299,7 @@
    * @inheritDoc
    */
   @Override
-  public void filterRowCells(List<Cell> cells) throws IOException {
+  public void filterRowCells(List<ServerCell> cells) throws IOException {
     int listize = filters.size();
     for (int i = 0; i < listize; i++) {
       filters.get(i).filterRowCells(cells);
@@ -394,8 +394,8 @@
   }
 
   @Override
-  public Cell getNextCellHint(Cell currentCell) throws IOException {
-    Cell keyHint = null;
+  public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
+    ServerCell keyHint = null;
     if (operator == Operator.MUST_PASS_ALL) {
       keyHint = seekHintFilter.getNextCellHint(currentCell);
       return keyHint;
@@ -404,7 +404,7 @@
     // If any condition can pass, we need to keep the min hint
     int listize = filters.size();
     for (int i = 0; i < listize; i++) {
-      Cell curKeyHint = filters.get(i).getNextCellHint(currentCell);
+      ServerCell curKeyHint = filters.get(i).getNextCellHint(currentCell);
       if (curKeyHint == null) {
         // If we ever don't have a hint and this is must-pass-one, then no hint
         return null;
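Client-side composition of FilterList is untouched by this patch; only the cell type seen by the server-side hooks changes. A short usage sketch:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

public class FilterListExample {
  static Scan buildScan() {
    // A row's cells must pass every filter in the list (MUST_PASS_ALL).
    FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new FirstKeyOnlyFilter(),  // only the first cell of each row
        new KeyOnlyFilter());      // strip values, keep keys
    Scan scan = new Scan();
    scan.setFilter(list);
    return scan;
  }
}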
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 5176115..5d08e45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -96,7 +96,7 @@ final public class FilterWrapper extends Filter {
   }
 
   @Override
-  public Cell getNextCellHint(Cell currentCell) throws IOException {
+  public ServerCell getNextCellHint(ServerCell currentCell) throws IOException {
     return this.filter.getNextCellHint(currentCell);
   }
 
@@ -106,12 +106,12 @@
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell v) throws IOException {
     return this.filter.filterKeyValue(v);
   }
 
   @Override
-  public Cell transformCell(Cell v) throws IOException {
+  public ServerCell transformCell(ServerCell v) throws IOException {
     return this.filter.transformCell(v);
   }
 
@@ -121,7 +121,7 @@
   }
 
   @Override
-  public void filterRowCells(List<Cell> kvs) throws IOException {
+  public void filterRowCells(List<ServerCell> kvs) throws IOException {
     filterRowCellsWithRet(kvs);
   }
 
@@ -130,7 +130,7 @@
     INCLUDE, // corresponds to filter.filterRow() returning false
     EXCLUDE  // corresponds to filter.filterRow() returning true
   }
-  public FilterRowRetCode filterRowCellsWithRet(List<Cell> kvs) throws IOException {
+  public FilterRowRetCode filterRowCellsWithRet(List<ServerCell> kvs) throws IOException {
     //To fix HBASE-6429,
     //Filter with filterRow() returning true is incompatible with scan with limit
     //1. hasFilterRow() returns true, if either filterRow() or filterRow(kvs) is implemented.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index 77ed7d9..ee3781c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -47,7 +47,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if(foundKV) return ReturnCode.NEXT_ROW;
     foundKV = true;
     return ReturnCode.INCLUDE;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index 622f5ab..d41ea4a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.filter;
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -61,7 +61,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (hasFoundKV()) {
       return ReturnCode.NEXT_ROW;
     } else if (hasOneMatchingQualifier(v)) {
@@ -70,7 +70,7 @@
     return ReturnCode.INCLUDE;
   }
 
-  private boolean hasOneMatchingQualifier(Cell v) {
+  private boolean hasOneMatchingQualifier(ServerCell v) {
     for (byte[] q : qualifiers) {
       if (CellUtil.matchingQualifier(v, q)) {
         return true;
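The qualifier test in hasOneMatchingQualifier above is just CellUtil.matchingQualifier in a loop. The same check in isolation, with placeholder qualifiers:

import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierMatch {
  // Placeholder qualifiers, kept in a byte[]-aware sorted set.
  static final Set<byte[]> QUALIFIERS = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
  static {
    QUALIFIERS.add(Bytes.toBytes("q1"));
    QUALIFIERS.add(Bytes.toBytes("q2"));
  }

  static boolean matchesAny(ServerCell c) {
    for (byte[] q : QUALIFIERS) {
      if (CellUtil.matchingQualifier(c, q)) {
        return true;
      }
    }
    return false;
  }
}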
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 9b99b71..bf6af6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -21,8 +21,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -85,7 +85,7 @@ public class FuzzyRowFilter extends FilterBase {
   // TODO: possible improvement: save which fuzzy row key to use when providing a hint
 
   @Override
-  public ReturnCode filterKeyValue(Cell c) {
+  public ReturnCode filterKeyValue(ServerCell c) {
     // assigning "worst" result first and looking for better options
     SatisfiesCode bestOption = SatisfiesCode.NO_NEXT;
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
@@ -110,7 +110,7 @@
   }
 
   @Override
-  public Cell getNextCellHint(Cell currentCell) {
+  public ServerCell getNextCellHint(ServerCell currentCell) {
     byte[] nextRowKey = null;
     // Searching for the "smallest" row key that satisfies at least one fuzzy row key
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index cf2d153..cd45074 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -53,7 +53,7 @@ public class InclusiveStopFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (done) return ReturnCode.NEXT_ROW;
     return ReturnCode.INCLUDE;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 2a2b525..a1183cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -50,11 +50,11 @@ public class KeyOnlyFilter extends FilterBase {
   public KeyOnlyFilter(boolean lenAsVal) { this.lenAsVal = lenAsVal; }
 
   @Override
-  public Cell transformCell(Cell cell) {
+  public ServerCell transformCell(ServerCell cell) {
     return createKeyOnlyCell(cell);
   }
 
-  private Cell createKeyOnlyCell(Cell c) {
+  private ServerCell createKeyOnlyCell(ServerCell c) {
     // KV format: <keylen:4><valuelen:4><key:keylen><value:valuelen>
     // Rebuild as: <keylen:4><0:4><key:keylen>
     int dataLen = lenAsVal ? Bytes.SIZEOF_INT : 0;
@@ -71,7 +71,7 @@
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
     return ReturnCode.INCLUDE;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index bd880a0..cce01c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -24,9 +24,9 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -124,12 +124,12 @@ public class MultiRowRangeFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell ignored) {
+  public ReturnCode filterKeyValue(ServerCell ignored) {
     return currentReturnCode;
   }
 
   @Override
-  public Cell getNextCellHint(Cell currentKV) {
+  public ServerCell getNextCellHint(ServerCell currentCell) {
     // skip to the next range's start row
     return KeyValueUtil.createFirstOnRow(range.startRow);
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index b7ec11a..6888a2e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -22,8 +22,8 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -64,7 +64,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell kv) {
+  public ReturnCode filterKeyValue(ServerCell kv) {
     if (sortedPrefixes.size() == 0 || kv.getQualifierArray() == null) {
       return ReturnCode.INCLUDE;
     } else {
@@ -155,7 +155,7 @@
   }
 
   @Override
-  public Cell getNextCellHint(Cell cell) {
+  public ServerCell getNextCellHint(ServerCell cell) {
     return KeyValueUtil.createFirstOnRow(
       cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
       cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
       hint, 0, hint.length);
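For reference, FuzzyRowFilter's client-side setup is also unchanged: each entry pairs a row key with a mask in which 0 marks a fixed byte and 1 a fuzzy ("don't care") byte. A sketch with placeholder key bytes:

import java.util.Arrays;

import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyExample {
  // Match 4-byte row keys whose first two bytes are 'A','B'; the last two
  // bytes may be anything.
  static FuzzyRowFilter prefixAB() {
    byte[] key  = new byte[] { 'A', 'B', 0, 0 };
    byte[] mask = new byte[] { 0, 0, 1, 1 };
    return new FuzzyRowFilter(Arrays.asList(new Pair<byte[], byte[]>(key, mask)));
  }
}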
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index 0dbd97b..a80277d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -61,7 +61,7 @@ public class PageFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
     return ReturnCode.INCLUDE;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 5b56748..bd32a68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -68,7 +68,7 @@ public class PrefixFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (filterRow) return ReturnCode.NEXT_ROW;
     return ReturnCode.INCLUDE;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index fb183f1..6de0dc8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -60,7 +60,7 @@ public class QualifierFilter extends CompareFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     int qualifierLength = v.getQualifierLength();
     if (qualifierLength > 0) {
       if (doCompare(this.compareOp, this.comparator, v.getQualifierArray(),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 2a25b32..d27acd8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.Random;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -72,7 +72,7 @@ public class RandomRowFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (filterOutRow) {
       return ReturnCode.NEXT_ROW;
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index cb4337e..c5a82e5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -66,7 +66,7 @@ public class RowFilter extends CompareFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if(this.filterOutRow) {
       return ReturnCode.NEXT_ROW;
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index d030fd2..fd0b7cd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -103,8 +103,8 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
 
   // Here we remove from row all key values from testing column
   @Override
-  public void filterRowCells(List<Cell> kvs) {
-    Iterator<Cell> it = kvs.iterator();
+  public void filterRowCells(List<ServerCell> kvs) {
+    Iterator<ServerCell> it = kvs.iterator();
     while (it.hasNext()) {
       // If the current column is actually the tested column,
       // we will skip it instead.
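Usage of the exclude variant stays the same as well; only the filterRowCells pruning above now operates on ServerCell. A sketch with placeholder family, qualifier, and value:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludeExample {
  // Scan rows whose "info:status" equals "ok", dropping the tested column
  // itself from the returned cells.
  static Scan buildScan() {
    SingleColumnValueExcludeFilter f = new SingleColumnValueExcludeFilter(
        Bytes.toBytes("info"), Bytes.toBytes("status"),
        CompareOp.EQUAL, Bytes.toBytes("ok"));
    f.setFilterIfMissing(true); // rows without the column are filtered out
    Scan scan = new Scan();
    scan.setFilter(f);
    return scan;
  }
}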
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index d905868..8aaadff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -24,8 +24,8 @@ import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -169,7 +169,7 @@ public class SingleColumnValueFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell c) {
+  public ReturnCode filterKeyValue(ServerCell c) {
     // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
     if (this.matchedColumn) {
       // We already found and matched the single column, all keys now pass
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index ce8e511..9aeef68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -74,14 +74,14 @@ public class SkipFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell v) throws IOException {
     ReturnCode c = filter.filterKeyValue(v);
     changeFR(c != ReturnCode.INCLUDE);
     return c;
   }
 
   @Override
-  public Cell transformCell(Cell v) throws IOException {
+  public ServerCell transformCell(ServerCell v) throws IOException {
     return filter.transformCell(v);
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index 32a3d73..cce2238 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -89,7 +89,7 @@ public class TimestampsFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (this.timestamps.contains(v.getTimestamp())) {
       return ReturnCode.INCLUDE;
     } else if (v.getTimestamp() < minTimeStamp) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index a2c5eb2..24a514d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -59,7 +59,7 @@ public class ValueFilter extends CompareFilter {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) {
+  public ReturnCode filterKeyValue(ServerCell v) {
     if (doCompare(this.compareOp, this.comparator, v.getValueArray(),
         v.getValueOffset(), v.getValueLength())) {
       return ReturnCode.SKIP;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 31d4f77..33b8773 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -73,14 +73,14 @@ public class WhileMatchFilter extends FilterBase {
   }
 
   @Override
-  public ReturnCode filterKeyValue(Cell v) throws IOException {
+  public ReturnCode filterKeyValue(ServerCell v) throws IOException {
     ReturnCode c = filter.filterKeyValue(v);
     changeFAR(c != ReturnCode.INCLUDE);
     return c;
   }
 
   @Override
-  public Cell transformCell(Cell v) throws IOException {
+  public ServerCell transformCell(ServerCell v) throws IOException {
     return filter.transformCell(v);
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 8b5b2d7..c3c02ed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
@@ -2513,6 +2514,14 @@
       kv.getValueLength()));
     return kvbuilder.build();
   }
+
+  public static CellProtos.Cell toCell(final ServerCell cell) {
+    if(cell.hasArray()){
+      return toCell((Cell)cell);
+    }
+    // TODO use getXXXBuffer APIs and make CellProtos.Cell object
+    return null;
+  }
 
   public static Cell toCell(final CellProtos.Cell cell) {
     // Doing this is going to kill us if we do it for all data passed.
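The hasArray() gate in toCell(ServerCell) is the patch's interim story for ByteBuffer backed cells: array-backed ServerCells reuse the existing byte[] paths, while buffer-backed ones must wait for the TODO getXXXBuffer variants. The same pattern sketched in caller code:

import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.util.Bytes;

public class HasArrayPattern {
  static String rowAsString(ServerCell c) {
    if (c.hasArray()) {
      // byte[]-backed: the classic accessors are safe to use.
      return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength());
    }
    // Buffer-backed cell: no byte[] view. A real caller would use the
    // buffer-based getters once the patch adds them (see the TODO in
    // ServerCell), or copy the bytes out.
    throw new UnsupportedOperationException("BB-backed cell not handled in this sketch");
  }
}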
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
index 540c967..c541ac8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
@@ -48,6 +48,7 @@ public class CellComparator implements Comparator<Cell>, Serializable {
     return compare(a, b, false);
   }
 
+  // TODO we will have to add ServerCell versions of compare too.
   /**
    * Compare cells.
    * TODO: Replace with dynamic rather than static comparator so can change comparator
@@ -389,8 +390,8 @@
    * @param right
    * @return A cell that sorts between left and right.
    */
-  public static Cell getMidpoint(final KeyValue.KVComparator comparator, final Cell left,
-      final Cell right) {
+  public static ServerCell getMidpoint(final KeyValue.KVComparator comparator, final ServerCell left,
+      final ServerCell right) {
     // TODO: Redo so only a single pass over the arrays rather than one to compare and then a
     // second composing midpoint.
     if (right == null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index bce3957..ec104c3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.Iterator;
 import java.util.List;
@@ -166,7 +167,7 @@
     return buffer;
   }
 
-  public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
+  public static ServerCell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
       final long timestamp, final byte type, final byte [] value) {
     // I need a Cell Factory here. Using KeyValue for now. TODO.
     // TODO: Make a new Cell implementation that just carries these
@@ -175,7 +176,7 @@
     return new KeyValue(row, family, qualifier, timestamp, KeyValue.Type.codeToType(type), value);
   }
 
-  public static Cell createCell(final byte [] rowArray, final int rowOffset, final int rowLength,
+  public static ServerCell createCell(final byte [] rowArray, final int rowOffset, final int rowLength,
       final byte [] familyArray, final int familyOffset, final int familyLength,
       final byte [] qualifierArray, final int qualifierOffset, final int qualifierLength) {
     // See createCell(final byte [] row, final byte [] value) for why we default Maximum type.
@@ -215,7 +216,7 @@
    * @param row
    * @return Cell with passed row but all other fields are arbitrary
    */
-  public static Cell createCell(final byte [] row) {
+  public static ServerCell createCell(final byte [] row) {
     return createCell(row, HConstants.EMPTY_BYTE_ARRAY);
   }
 
@@ -225,7 +226,7 @@
    * @param value
    * @return Cell with passed row and value but all other fields are arbitrary
    */
-  public static Cell createCell(final byte [] row, final byte [] value) {
+  public static ServerCell createCell(final byte [] row, final byte [] value) {
     // An empty family + empty qualifier + Type.Minimum is used as flag to indicate last on row.
     // See the CellComparator and KeyValue comparator.  Search for compareWithoutRow.
     // Lets not make a last-on-row key as default but at same time, if you are making a key
@@ -241,7 +242,7 @@
    * @param qualifier
    * @return Cell with passed row but all other fields are arbitrary
    */
-  public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier) {
+  public static ServerCell createCell(final byte [] row, final byte [] family, final byte [] qualifier) {
     // See above in createCell(final byte [] row, final byte [] value) why we set type to Maximum.
     return createCell(row, family, qualifier,
       HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum.getCode(), HConstants.EMPTY_BYTE_ARRAY);
@@ -389,11 +390,16 @@
         length);
   }
 
-  public static boolean matchingFamily(final Cell left, final Cell right) {
+  public static boolean matchingFamily(final ServerCell left, final ServerCell right) {
     return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(),
         right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
   }
 
+  public static boolean matchingFamily(final ServerCell left, final byte[] buf) {
+    return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
+        0, buf.length);
+  }
+
   public static boolean matchingFamily(final Cell left, final byte[] buf) {
     return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
         0, buf.length);
@@ -405,12 +411,22 @@
         offset, length);
   }
 
+  public static boolean matchingFamily(final ServerCell left, final byte[] buf, final int offset,
+      final int length) {
+    return Bytes.equals(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), buf,
+        offset, length);
+  }
+
   public static boolean matchingQualifier(final Cell left, final Cell right) {
     return Bytes.equals(left.getQualifierArray(), left.getQualifierOffset(),
         left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
         right.getQualifierLength());
   }
 
+  public static boolean matchingQualifier(final ServerCell left, final byte[] buf) {
+    return matchingQualifier((Cell)left, buf);
+  }
+
   public static boolean matchingQualifier(final Cell left, final byte[] buf) {
     if (buf == null) {
       return left.getQualifierLength() == 0;
@@ -428,12 +444,19 @@
         left.getQualifierLength(), buf, offset, length);
   }
 
+  // TODO we will have to add ServerCell versions of methods.
   public static boolean matchingColumn(final Cell left, final byte[] fam, final byte[] qual) {
     if (!matchingFamily(left, fam))
       return false;
     return matchingQualifier(left, qual);
   }
 
+  public static boolean matchingColumn(final ServerCell left, final byte[] fam, final byte[] qual) {
+    if (!matchingFamily(left, fam))
+      return false;
+    return matchingQualifier(left, qual);
+  }
+
   public static boolean matchingColumn(final Cell left, final byte[] fam, final int foffset,
       final int flength, final byte[] qual, final int qoffset, final int qlength) {
     if (!matchingFamily(left, fam, foffset, flength))
@@ -441,7 +464,14 @@
     return matchingQualifier(left, qual, qoffset, qlength);
   }
 
-  public static boolean matchingColumn(final Cell left, final Cell right) {
+  public static boolean matchingColumn(final ServerCell left, final byte[] fam, final int foffset,
+      final int flength, final byte[] qual, final int qoffset, final int qlength) {
+    if (!matchingFamily(left, fam, foffset, flength))
+      return false;
+    return matchingQualifier(left, qual, qoffset, qlength);
+  }
+
+  public static boolean matchingColumn(final ServerCell left, final ServerCell right) {
     if (!matchingFamily(left, right))
       return false;
     return matchingQualifier(left, right);
@@ -639,11 +669,11 @@
    * @throws IOException when the passed cell is not of type {@link SettableSequenceId}
    */
   public static void setSequenceId(Cell cell, long seqId) throws IOException {
-    if (cell instanceof SettableSequenceId) {
-      ((SettableSequenceId) cell).setSequenceId(seqId);
+    if (cell instanceof ServerCell) {
+      ((ServerCell) cell).setSequenceId(seqId);
     } else {
       throw new IOException(new UnsupportedOperationException("Cell is not of type "
-          + SettableSequenceId.class.getName()));
+          + ServerCell.class.getName()));
     }
   }
 
@@ -654,11 +684,11 @@
    * @throws IOException when the passed cell is not of type {@link SettableTimestamp}
    */
   public static void setTimestamp(Cell cell, long ts) throws IOException {
-    if (cell instanceof SettableTimestamp) {
-      ((SettableTimestamp) cell).setTimestamp(ts);
+    if (cell instanceof ServerCell) {
+      ((ServerCell) cell).setTimestamp(ts);
     } else {
       throw new IOException(new UnsupportedOperationException("Cell is not of type "
-          + SettableTimestamp.class.getName()));
+          + ServerCell.class.getName()));
     }
   }
 
@@ -670,11 +700,11 @@
    * @throws IOException when the passed cell is not of type {@link SettableTimestamp}
    */
   public static void setTimestamp(Cell cell, byte[] ts, int tsOffset) throws IOException {
-    if (cell instanceof SettableTimestamp) {
-      ((SettableTimestamp) cell).setTimestamp(ts, tsOffset);
+    if (cell instanceof ServerCell) {
+      ((ServerCell) cell).setTimestamp(ts, tsOffset);
     } else {
       throw new IOException(new UnsupportedOperationException("Cell is not of type "
-          + SettableTimestamp.class.getName()));
+          + ServerCell.class.getName()));
     }
   }
 
@@ -902,4 +932,14 @@
     return builder.toString();
   }
+
+  public static void oswrite(final Cell cell, final OutputStream out, final boolean withTags)
+      throws IOException {
+    if (cell instanceof ServerCell) {
+      ((ServerCell) cell).oswrite(out, withTags);
+    } else {
+      // TODO change to IOE?
+      throw new IllegalStateException("Got a cell which can not be written to OutputStream");
+    }
+  }
 }
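Callers of the CellUtil setters are unaffected by the SettableSequenceId/SettableTimestamp removal; the helpers simply type-check against ServerCell now. For example (this works because KeyValue implements ServerCell after this patch; a plain client-side Cell implementation would get the IOException branch):

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;

public class SetSeqIdExample {
  static void stamp(Cell c, long seqId) throws IOException {
    // Succeeds when c is a ServerCell, throws IOException otherwise.
    CellUtil.setSequenceId(c, seqId);
  }

  public static void main(String[] args) throws IOException {
    stamp(new KeyValue(), 42L);
  }
}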
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 19e251a..841a4c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -957,7 +957,7 @@ public final class HConstants {
    * The byte array represents for NO_NEXT_INDEXED_KEY;
    * The actual value is irrelevant because this is always compared by reference.
    */
-  public static final Cell NO_NEXT_INDEXED_KEY = new KeyValue();
+  public static final ServerCell NO_NEXT_INDEXED_KEY = new KeyValue();
 
   /** delimiter used between portions of a region name */
   public static final int DELIMITER = ',';
 
   public static final String HBASE_CONFIG_READ_ZOOKEEPER_CONFIG =
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 7de1f54..fb80abb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -79,7 +79,7 @@
  * and actual tag bytes length.
  */
 @InterfaceAudience.Private
-public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, SettableTimestamp {
+public class KeyValue implements ServerCell, HeapSize, Cloneable {
   private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<Tag>();
 
   static final Log LOG = LogFactory.getLog(KeyValue.class);
@@ -1058,10 +1058,10 @@
    */
   @Override
   public boolean equals(Object other) {
-    if (!(other instanceof Cell)) {
+    if (!(other instanceof ServerCell)) {
       return false;
     }
-    return CellComparator.equals(this, (Cell)other);
+    return CellComparator.equals(this, (ServerCell)other);
   }
 
   /**
@@ -1945,10 +1945,12 @@
     return (0xff & type) - (0xff & cell.getTypeByte());
   }
 
+  // TODO how to accept ServerCell in Comparators
   public int compareOnlyKeyPortion(Cell left, Cell right) {
     return CellComparator.compare(left, right, true);
   }
 
+  // TODO we will have to add ServerCell versions of compare too.
   /**
    * Compares the Key of a cell -- with fields being more significant in this order:
    * rowkey, colfam/qual, timestamp, type, mvcc
@@ -2822,4 +2824,14 @@
       return super.equals(other);
     }
   }
+
+  @Override
+  public boolean hasArray() {
+    return true;
+  }
+
+  @Override
+  public void oswrite(OutputStream out, boolean withTags) throws IOException {
+    oswrite(this, out, withTags);
+  }
 }
- out.write(cell.getValueArray(), cell.getValueOffset(), vlen); - // write tags if we have to - if (withTags) { - // 2 bytes tags length followed by tags bytes - // tags length is serialized with 2 bytes only(short way) even if the type is int. As this - // is non -ve numbers, we save the sign bit. See HBASE-11437 - out.write((byte) (0xff & (tlen >> 8))); - out.write((byte) (0xff & tlen)); - out.write(cell.getTagsArray(), cell.getTagsOffset(), tlen); - } - } - } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java new file mode 100644 index 0000000..0d36378 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerCell.java @@ -0,0 +1,58 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import java.io.IOException; +import java.io.OutputStream; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +public interface ServerCell extends Cell { + + /** + * @return true if the Cell is backed by an on heap byte[]; for a ByteBuffer (BB) + * backed Cell this returns false. + */ + boolean hasArray(); + + // TODO add the buffer backed getter APIs here + + void oswrite(OutputStream out, boolean withTags) throws IOException; + + /** + * Sets the given seqId on this Cell. + * @param seqId + */ + void setSequenceId(long seqId) throws IOException; + + /** + * Sets the given timestamp on this Cell. + * @param ts + */ + void setTimestamp(long ts) throws IOException; + + /** + * Sets the given timestamp on this Cell. + * @param ts buffer containing the timestamp value + * @param tsOffset offset to the new timestamp + */ + void setTimestamp(byte[] ts, int tsOffset) throws IOException; +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java deleted file mode 100644 index 352028a..0000000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableSequenceId.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License.
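
[Editor's sketch, not part of the patch] The removed KeyValueUtil.oswrite above and the ServerCell.oswrite implementations added elsewhere in this patch all emit the same KeyValue wire layout. A minimal standalone sketch of that layout, assuming a byte[] backed cell and non-empty tags whenever withTags is true; the class and parameter names are illustrative, not from the patch:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class KeyValueWireSketch {
  public static byte[] serialize(byte[] row, byte[] family, byte[] qualifier,
      long timestamp, byte typeByte, byte[] value, byte[] tags, boolean withTags)
      throws IOException {
    // key = 2B row length + row + 1B family length + family + qualifier
    //       + 8B timestamp + 1B type
    int keyLen = 2 + row.length + 1 + family.length + qualifier.length + 8 + 1;
    // total length counts the key-length and value-length ints (8B), the key,
    // the value and, with tags, a 2B tags length plus the tag bytes
    int totalLen = 8 + keyLen + value.length + (withTags ? 2 + tags.length : 0);
    ByteArrayOutputStream bytes = new ByteArrayOutputStream(4 + totalLen);
    DataOutputStream out = new DataOutputStream(bytes); // big-endian, like StreamUtils
    out.writeInt(totalLen);
    out.writeInt(keyLen);
    out.writeInt(value.length);
    out.writeShort(row.length);
    out.write(row);
    out.writeByte(family.length);
    out.write(family);
    out.write(qualifier); // no explicit length; derivable from keyLen
    out.writeLong(timestamp);
    out.writeByte(typeByte);
    out.write(value);
    if (withTags) {
      // tags length fits in 2 bytes; lengths are non-negative, so the sign
      // bit is reused for magnitude (HBASE-11437)
      out.writeShort(tags.length);
      out.write(tags);
    }
    return bytes.toByteArray();
  }
}
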
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import java.io.IOException; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -/** - * Using this Interface one can mark a Cell as Sequence stampable.
- * Note : Make sure to make Cell implementation of this type in server side. - */ -@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) -public interface SettableSequenceId { - - /** - * Sets with the given seqId. - * @param seqId - */ - void setSequenceId(long seqId) throws IOException; -} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java deleted file mode 100644 index 6dac5ae..0000000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/SettableTimestamp.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import java.io.IOException; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -/** - * Using this Interface one can mark a Cell as timestamp changeable.
- * Note : Server side Cell implementations in write path must implement this. - */ -@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) -public interface SettableTimestamp { - - /** - * Sets with the given timestamp. - * @param ts - */ - void setTimestamp(long ts) throws IOException; - - /** - * Sets with the given timestamp. - * @param ts buffer containing the timestamp value - * @param tsOffset offset to the new timestamp - */ - void setTimestamp(byte[] ts, int tsOffset) throws IOException; -} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java index 9d03d89..5c503fb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java @@ -25,6 +25,7 @@ import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; @@ -59,6 +60,13 @@ public class CellCodec implements Codec { this.out.write(Bytes.toBytes(cell.getMvccVersion())); } + @Override + public void write(ServerCell cell) throws IOException { + // TODO + // Same impl as above method. But check for hasArray and if false, deal with getXXXBuffer + // APIs. + } + /** * Write int length followed by array bytes. * @param bytes diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java index a614026..5775ab2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java @@ -25,6 +25,7 @@ import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; @@ -61,6 +62,13 @@ public class CellCodecWithTags implements Codec { this.out.write(Bytes.toBytes(cell.getMvccVersion())); } + @Override + public void write(ServerCell cell) throws IOException { + // TODO + // Same impl as above method. But check for hasArray and if false, deal with getXXXBuffer + // APIs. + } + /** * Write int length followed by array bytes. * diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java index de44ec6..4ae9650 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java @@ -23,7 +23,7 @@ import java.io.OutputStream; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.io.CellOutputStream; +import org.apache.hadoop.hbase.io.ServerCellOutputStream; /** * Encoder/Decoder for Cell. @@ -40,7 +40,7 @@ public interface Codec { * Call flush when done. Some encoders may not put anything on the stream until flush is called. * On flush, let go of any resources used by the encoder. 
*/ - interface Encoder extends CellOutputStream {} + interface Encoder extends ServerCellOutputStream {} /** * Implementations should implicitly clean up any resources allocated when the diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java index f41d6b0..0154474 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java @@ -22,9 +22,10 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -55,7 +56,12 @@ public class KeyValueCodec implements Codec { public void write(Cell cell) throws IOException { checkFlushed(); // Do not write tags over RPC - KeyValueUtil.oswrite(cell, out, false); + CellUtil.oswrite(cell, out, false); + } + + @Override + public void write(ServerCell cell) throws IOException { + write((Cell) cell); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java index 664fcac..2f92f23 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java @@ -22,9 +22,10 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -61,7 +62,12 @@ public class KeyValueCodecWithTags implements Codec { public void write(Cell cell) throws IOException { checkFlushed(); // Write tags - KeyValueUtil.oswrite(cell, out, true); + CellUtil.oswrite(cell, out, true); + } + + @Override + public void write(ServerCell cell) throws IOException { + write((Cell) cell); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java new file mode 100644 index 0000000..0eb70b7 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ServerCellOutputStream.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
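
[Editor's sketch, not part of the patch] The write(ServerCell) TODOs in CellCodec and CellCodecWithTags above describe a hasArray() dispatch. One possible shape of that method body, sketched under the assumption that the buffer backed getters are still TODO on ServerCell, so the non-array branch is only a placeholder, not the patch's implementation:

  @Override
  public void write(ServerCell cell) throws IOException {
    if (cell.hasArray()) {
      // byte[] backed cell: the existing array based write(Cell) path applies
      write((Cell) cell);
    } else {
      // ByteBuffer backed cell: would read each component through the (TODO)
      // getXXXBuffer APIs instead of getXXXArray()/getXXXOffset()
      throw new UnsupportedOperationException("BB backed cells not yet handled");
    }
  }
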
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.io; + +import java.io.IOException; + +import org.apache.hadoop.hbase.ServerCell; +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +public interface ServerCellOutputStream extends CellOutputStream { + + void write(ServerCell cell) throws IOException; +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index be8c192..5bcf799 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; @@ -30,13 +31,14 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.SettableSequenceId; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.util.LRUDictionary; +import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -306,7 +308,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { + getValueLength() + "/seqid=" + memstoreTS; } - public Cell shallowCopy() { + public ServerCell shallowCopy() { return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength, currentKey.getQualifierOffset(), currentKey.getQualifierLength(), @@ -325,7 +327,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { // there. So this has to be an instance of SettableSequenceId. SeekerState need not be // SettableSequenceId as we never return that to top layers. When we have to, we make // ClonedSeekerState from it. 
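
[Editor's sketch, not part of the patch] The comment above captures the copy-out pattern behind shallowCopy(): the seeker mutates a single SeekerState in place as it advances, so a cell must be materialized into a ClonedSeekerState before it escapes to upper layers. A generic illustration of the idea; every name here is invented:

  // One mutable cursor object is reused across advances...
  final class ReusedState {
    byte[] keyBuffer; // overwritten on every advance
    Snapshot shallowCopy() { return new Snapshot(keyBuffer.clone()); }
  }
  // ...so callers receive a snapshot that later advances cannot mutate.
  final class Snapshot {
    final byte[] key;
    Snapshot(byte[] key) { this.key = key; }
  }
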
- protected static class ClonedSeekerState implements Cell, HeapSize, SettableSequenceId { + protected static class ClonedSeekerState implements ServerCell, HeapSize { private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + (4 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (7 * Bytes.SIZEOF_INT) + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (2 * ClassSize.ARRAY)); @@ -514,6 +516,49 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { public long heapSize() { return FIXED_OVERHEAD + rowLength + familyLength + qualifierLength + valueLength + tagsLength; } + + @Override + public boolean hasArray() { + return true; + } + + @Override + public void oswrite(OutputStream out, boolean withTags) throws IOException { + int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength, + tagsLength, withTags); + StreamUtils.writeInt(out, lenToWrite); + StreamUtils.writeInt(out, keyOnlyBuffer.length); + StreamUtils.writeInt(out, valueLength); + // Write key + out.write(keyOnlyBuffer); + // Write value + out.write(this.currentBuffer.array(), this.currentBuffer.arrayOffset() + this.valueOffset, + this.valueLength); + if (withTags) { + // 2 bytes tags length followed by tags bytes + // tags length is serialized with 2 bytes only(short way) even if the + // type is int. As this + // is non -ve numbers, we save the sign bit. See HBASE-11437 + out.write((byte) (0xff & (this.tagsLength >> 8))); + out.write((byte) (0xff & this.tagsLength)); + if (this.tagCompressionContext != null) { + out.write(cloneTagsBuffer); + } else { + out.write(this.currentBuffer.array(), this.currentBuffer.arrayOffset() + this.tagsOffset, + this.tagsLength); + } + } + } + + @Override + public void setTimestamp(long ts) throws IOException { + this.timestamp = ts; + } + + @Override + public void setTimestamp(byte[] ts, int tsOffset) throws IOException { + this.timestamp = Bytes.toLong(ts, tsOffset); + } } protected abstract static class @@ -556,7 +601,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(KVComparator comparator, ServerCell key) { return comparator.compareOnlyKeyPortion(key, new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength)); } @@ -625,7 +670,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } @Override - public Cell getKeyValue() { + public ServerCell getKeyValue() { return current.shallowCopy(); } @@ -682,7 +727,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } @Override - public int seekToKeyInBlock(Cell seekCell, boolean seekBefore) { + public int seekToKeyInBlock(ServerCell seekCell, boolean seekBefore) { int rowCommonPrefix = 0; int familyCommonPrefix = 0; int qualCommonPrefix = 0; @@ -749,7 +794,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } } } else { - Cell r = new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength); + ServerCell r = new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength); comp = comparator.compareOnlyKeyPortion(seekCell, r); } if (comp == 0) { // exact match @@ -790,7 +835,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return 1; } - private int compareTypeBytes(Cell key, Cell right) { + private int compareTypeBytes(ServerCell key, ServerCell right) { if (key.getFamilyLength() + key.getQualifierLength() == 0 && key.getTypeByte() == 
Type.Minimum.getCode()) { // left is "bigger", i.e. it appears later in the sorted order @@ -846,7 +891,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { * @return unencoded size added * @throws IOException */ - protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out, + protected final int afterEncodingKeyValue(ServerCell cell, DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException { int size = 0; if (encodingCtx.getHFileContext().isIncludesTags()) { @@ -978,7 +1023,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } @Override - public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException { BufferedDataBlockEncodingState state = (BufferedDataBlockEncodingState) encodingCtx .getEncodingState(); @@ -987,7 +1032,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return encodedKvSize; } - public abstract int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingCtx, + public abstract int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingCtx, DataOutputStream out) throws IOException; @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java index 6b87c77..7cf3c00 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java @@ -21,11 +21,11 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -39,7 +39,7 @@ import org.apache.hadoop.io.WritableUtils; public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder { @Override - public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext, + public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out) throws IOException { int klength = KeyValueUtil.keyLength(cell); int vlength = cell.getValueLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java index 872c22c..1bf8e4f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java @@ -21,8 +21,8 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFileContext; @@ -58,7 +58,7 @@ public interface DataBlockEncoder { * @return unencoded kv 
size written * @throws IOException */ - int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException; /** @@ -161,7 +161,7 @@ public interface DataBlockEncoder { * @return the KeyValue object at the current position. Includes memstore * timestamp. */ - Cell getKeyValue(); + ServerCell getKeyValue(); /** Set position to beginning of given block */ void rewind(); @@ -208,7 +208,7 @@ public interface DataBlockEncoder { * of an exact match. Does not matter in case of an inexact match. * @return 0 on exact match, 1 on inexact match. */ - int seekToKeyInBlock(Cell key, boolean seekBefore); + int seekToKeyInBlock(ServerCell key, boolean seekBefore); /** * Compare the given key against the current key @@ -220,6 +220,6 @@ public interface DataBlockEncoder { */ public int compareKey(KVComparator comparator, byte[] key, int offset, int length); - public int compareKey(KVComparator comparator, Cell key); + public int compareKey(KVComparator comparator, ServerCell key); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index 4182dc4..947013f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -21,11 +21,11 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -195,7 +195,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext, + public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out) throws IOException { EncodingState state = encodingContext.getEncodingState(); int size = compressSingleKeyValue(out, cell, state.prevCell); @@ -204,7 +204,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { return size; } - private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell) + private int compressSingleKeyValue(DataOutputStream out, ServerCell cell, ServerCell prevCell) throws IOException { byte flag = 0; int kLength = KeyValueUtil.keyLength(cell); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java index a333a15..93d5d2b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.io.encoding; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -30,5 +30,5 @@ public class EncodingState { /** * The previous Cell the encoder 
encoded. */ - protected Cell prevCell = null; + protected ServerCell prevCell = null; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index a6f43d0..7b44452 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -21,11 +21,11 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -239,7 +239,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext, + public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out) throws IOException { EncodingState state = encodingContext.getEncodingState(); int size = compressSingleKeyValue(out, cell, state.prevCell); @@ -248,7 +248,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { return size; } - private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell) + private int compressSingleKeyValue(DataOutputStream out, ServerCell cell, ServerCell prevCell) throws IOException { byte flag = 0; int kLength = KeyValueUtil.keyLength(cell); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index 0286eca..39522cc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -21,11 +21,11 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes; public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder { @Override - public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext, + public int internalEncode(ServerCell cell, HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out) throws IOException { int klength = KeyValueUtil.keyLength(cell); int vlength = cell.getValueLength(); @@ -75,7 +75,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder { return size; } - private void writeKeyExcludingCommon(Cell cell, int commonPrefix, DataOutputStream out) + private void writeKeyExcludingCommon(ServerCell cell, int commonPrefix, DataOutputStream out) throws IOException 
{ short rLen = cell.getRowLength(); if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index 007f826..e00495c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -82,9 +81,9 @@ public class TestCellComparator { public void testGetShortMidpoint() { KeyValue.KVComparator comparator = new KeyValue.KVComparator(); - Cell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); - Cell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); - Cell mid = CellComparator.getMidpoint(comparator, left, right); + ServerCell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); + ServerCell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); + ServerCell mid = CellComparator.getMidpoint(comparator, left, right); assertTrue(CellComparator.compare(left, mid, true) <= 0); assertTrue(CellComparator.compare(mid, right, true) <= 0); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java index e0c3bae..3c3e578 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java @@ -26,12 +26,12 @@ import java.util.TreeSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Scan; @@ -134,9 +134,9 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor // filter and having necessary column(s). 
scanner = region.getScanner(scan); while (hasMore) { - List<List<Cell>> deleteRows = new ArrayList<List<Cell>>(rowBatchSize); + List<List<ServerCell>> deleteRows = new ArrayList<List<ServerCell>>(rowBatchSize); for (int i = 0; i < rowBatchSize; i++) { - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); hasMore = NextState.hasMoreValues(scanner.next(results)); if (results.size() > 0) { deleteRows.add(results); } @@ -149,7 +149,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor if (deleteRows.size() > 0) { Mutation[] deleteArr = new Mutation[deleteRows.size()]; int i = 0; - for (List<Cell> deleteRow : deleteRows) { + for (List<ServerCell> deleteRow : deleteRows) { deleteArr[i++] = createDeleteMutation(deleteRow, deleteType, timestamp); } OperationStatus[] opStatus = region.batchMutate(deleteArr, HConstants.NO_NONCE, @@ -191,7 +191,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor done.run(result); } - private Delete createDeleteMutation(List<Cell> deleteRow, DeleteType deleteType, + private Delete createDeleteMutation(List<ServerCell> deleteRow, DeleteType deleteType, Long timestamp) { long ts; if (timestamp == null) { @@ -204,14 +204,14 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor Delete delete = new Delete(row, ts); if (deleteType == DeleteType.FAMILY) { Set<byte[]> families = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR); - for (Cell kv : deleteRow) { + for (ServerCell kv : deleteRow) { if (families.add(CellUtil.cloneFamily(kv))) { delete.deleteFamily(CellUtil.cloneFamily(kv), ts); } } } else if (deleteType == DeleteType.COLUMN) { Set<Column> columns = new HashSet<Column>(); - for (Cell kv : deleteRow) { + for (ServerCell kv : deleteRow) { Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv)); if (columns.add(column)) { // Making deleteColumns() calls more than once for the same cf:qualifier is not correct @@ -227,13 +227,13 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor // the scan fetched will get deleted. int noOfVersionsToDelete = 0; if (timestamp == null) { - for (Cell kv : deleteRow) { + for (ServerCell kv : deleteRow) { delete.deleteColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp()); noOfVersionsToDelete++; } } else { Set<Column> columns = new HashSet<Column>(); - for (Cell kv : deleteRow) { + for (ServerCell kv : deleteRow) { Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv)); // Only one version of particular column getting deleted.
if (columns.add(column)) { diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java index 2afd05e..78d92b3 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java @@ -22,10 +22,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.hadoop.hbase.coprocessor.CoprocessorService; @@ -76,13 +76,13 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService InternalScanner scanner = null; try { scanner = env.getRegion().getScanner(scan); - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMore = false; byte[] lastRow = null; long count = 0; do { hasMore = NextState.hasMoreValues(scanner.next(results)); - for (Cell kv : results) { + for (ServerCell kv : results) { byte[] currentRow = CellUtil.cloneRow(kv); if (lastRow == null || !Bytes.equals(lastRow, currentRow)) { lastRow = currentRow; @@ -116,12 +116,12 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService InternalScanner scanner = null; try { scanner = env.getRegion().getScanner(new Scan()); - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMore = false; long count = 0; do { hasMore = NextState.hasMoreValues(scanner.next(results)); - for (Cell kv : results) { + for (ServerCell kv : results) { count++; } results.clear(); diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index afcb526..5b482de 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -24,12 +24,12 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.MetaComparator; import org.apache.hadoop.hbase.KeyValue.RawBytesComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory; import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher; import org.apache.hadoop.hbase.codec.prefixtree.encode.EncoderFactory; @@ -162,7 +162,7 @@ public class PrefixTreeCodec implements DataBlockEncoder{ } @Override - public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException { PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx .getEncodingState(); PrefixTreeEncoder builder = state.builder; diff --git
a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java index b95055c..d001ad7 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java @@ -18,21 +18,23 @@ package org.apache.hadoop.hbase.codec.prefixtree; +import java.io.IOException; +import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.SettableSequenceId; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory; import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker; +import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -96,8 +98,8 @@ public class PrefixTreeSeeker implements EncodedSeeker { * currently must do deep copy into new array */ @Override - public Cell getKeyValue() { - Cell cell = ptSearcher.current(); + public ServerCell getKeyValue() { + ServerCell cell = ptSearcher.current(); if (cell == null) { return null; } @@ -119,7 +121,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to * use this method instead of the getKeyValue() methods above. */ - public Cell get() { + public ServerCell get() { return ptSearcher.current(); } @@ -187,7 +189,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { * both. Possibly expand the EncodedSeeker to utilize them both. 
*/ - protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) { + protected int seekToOrBeforeUsingPositionAtOrBefore(ServerCell kv, boolean seekBefore) { // this does a deep copy of the key byte[] because the CellSearcher // interface wants a Cell CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv); @@ -211,7 +213,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { return seekToOrBeforeUsingPositionAtOrAfter(kv, seekBefore); } - protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) { + protected int seekToOrBeforeUsingPositionAtOrAfter(ServerCell kv, boolean seekBefore) { // should probably switch this to use the seekForwardToOrBefore method CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv); @@ -249,7 +251,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { } @Override - public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) { + public int seekToKeyInBlock(ServerCell key, boolean forceBeforeOnExactMatch) { if (USE_POSITION_BEFORE) { return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch); } else { @@ -258,7 +260,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(KVComparator comparator, ServerCell key) { ByteBuffer bb = getKeyDeepCopy(); return comparator.compare(key, new KeyValue.KeyOnlyKeyValue(bb.array(), bb.arrayOffset(), bb.limit())); @@ -268,7 +270,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { * of the key part is deep copied * */ - private static class ClonedPrefixTreeCell implements Cell, SettableSequenceId, HeapSize { + private static class ClonedPrefixTreeCell implements ServerCell, HeapSize { private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT) + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY)); @@ -451,5 +453,54 @@ public class PrefixTreeSeeker implements EncodedSeeker { public long heapSize() { return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength; } + + @Override + public boolean hasArray() { + return true; + } + + @Override + public void oswrite(OutputStream out, boolean withTags) throws IOException { + // write total length + StreamUtils.writeInt(out, + KeyValueUtil.length(rowLength, famLength, qualLength, valLength, tagsLength, withTags)); + // write key length + StreamUtils.writeInt(out, KeyValueUtil.keyLength(rowLength, famLength, qualLength)); + // write value length + StreamUtils.writeInt(out, valLength); + // Write rowkey - 2 bytes rk length followed by rowkey bytes + StreamUtils.writeShort(out, rowLength); + out.write(row); + // Write cf - 1 byte of cf length followed by the family bytes + out.write(famLength); + out.write(fam); + // write qualifier + out.write(qual); + // write timestamp + StreamUtils.writeLong(out, ts); + // write the type + out.write(type); + // write value + out.write(val, valOffset, valLength); + // write tags if we have to + if (withTags) { + // 2 bytes tags length followed by tags bytes + // tags length is serialized with 2 bytes only(short way) even if the type is int. As this + // is non -ve numbers, we save the sign bit. 
See HBASE-11437 + out.write((byte) (0xff & (tagsLength >> 8))); + out.write((byte) (0xff & tagsLength)); + out.write(tag); + } + } + + @Override + public void setTimestamp(long ts) throws IOException { + this.ts = ts; + } + + @Override + public void setTimestamp(byte[] ts, int tsOffset) throws IOException { + this.ts = Bytes.toLong(ts, tsOffset); + } } } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java index cb7eeea..8734b88 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.decode.column.ColumnReader; import org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader; @@ -154,11 +154,11 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne /********************** CellScanner **********************/ @Override - public Cell current() { + public ServerCell current() { if(isOutOfBounds()){ return null; } - return (Cell)this; + return (ServerCell)this; } /******************* Object methods ************************/ @@ -179,7 +179,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne */ @Override public String toString() { - Cell currentCell = current(); + ServerCell currentCell = current(); if(currentCell==null){ return "null"; } @@ -418,7 +418,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne /********************* fill in family/qualifier/ts/type/value ************/ - protected int populateNonRowFieldsAndCompareTo(int cellNum, Cell key) { + protected int populateNonRowFieldsAndCompareTo(int cellNum, ServerCell key) { populateNonRowFields(cellNum); return CellComparator.compare(this, key, true); } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java index ec54c2a..942f89b 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; @@ -56,12 +56,12 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im /********************* CellSearcher methods 
*******************/ @Override - public boolean positionAt(Cell key) { + public boolean positionAt(ServerCell key) { return CellScannerPosition.AT == positionAtOrAfter(key); } @Override - public CellScannerPosition positionAtOrBefore(Cell key) { + public CellScannerPosition positionAtOrBefore(ServerCell key) { reInitFirstNode(); int fanIndex = -1; @@ -106,7 +106,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im * if-statements. Priority on readability and debugability. */ @Override - public CellScannerPosition positionAtOrAfter(Cell key) { + public CellScannerPosition positionAtOrAfter(ServerCell key) { reInitFirstNode(); int fanIndex = -1; @@ -151,7 +151,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im } @Override - public boolean seekForwardTo(Cell key) { + public boolean seekForwardTo(ServerCell key) { if(currentPositionIsAfter(key)){ //our position is after the requested key, so can't do anything return false; @@ -160,7 +160,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im } @Override - public CellScannerPosition seekForwardToOrBefore(Cell key) { + public CellScannerPosition seekForwardToOrBefore(ServerCell key) { //Do we even need this check or should upper layers avoid this situation. It's relatively //expensive compared to the rest of the seek operation. if(currentPositionIsAfter(key)){ @@ -172,7 +172,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im } @Override - public CellScannerPosition seekForwardToOrAfter(Cell key) { + public CellScannerPosition seekForwardToOrAfter(ServerCell key) { //Do we even need this check or should upper layers avoid this situation. It's relatively //expensive compared to the rest of the seek operation. 
if(currentPositionIsAfter(key)){ @@ -205,11 +205,11 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im /****************** internal methods ************************/ - protected boolean currentPositionIsAfter(Cell cell){ + protected boolean currentPositionIsAfter(ServerCell cell){ return compareTo(cell) > 0; } - protected CellScannerPosition positionAtQualifierTimestamp(Cell key, boolean beforeOnMiss) { + protected CellScannerPosition positionAtQualifierTimestamp(ServerCell key, boolean beforeOnMiss) { int minIndex = 0; int maxIndex = currentRowNode.getLastCellIndex(); int diff; @@ -262,7 +262,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im * @param key Cell being searched for * @return true if row buffer contents match key.row */ - protected boolean rowMatchesAfterCurrentPosition(Cell key) { + protected boolean rowMatchesAfterCurrentPosition(ServerCell key) { if (!currentRowNode.hasOccurrences()) { return false; } @@ -279,7 +279,7 @@ public class PrefixTreeArraySearcher extends PrefixTreeArrayReversibleScanner im * @param key * @return return -1 if key is lessThan (before) this, 0 if equal, and 1 if key is after */ - protected int compareToCurrentToken(Cell key) { + protected int compareToCurrentToken(ServerCell key) { int startIndex = rowLength - currentRowNode.getTokenLength(); int endIndexExclusive = startIndex + currentRowNode.getTokenLength(); for (int i = startIndex; i < endIndexExclusive; ++i) { diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java index 97eed62..2b37952 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java @@ -18,13 +18,16 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode; -import org.apache.hadoop.hbase.Cell; +import java.io.IOException; +import java.io.OutputStream; + import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.SettableSequenceId; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.util.Bytes; /** * As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields @@ -32,7 +35,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * iterated through. */ @InterfaceAudience.Private -public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable { +public class PrefixTreeCell implements ServerCell, Comparable { /********************** static **********************/ @@ -87,11 +90,11 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable */ @InterfaceAudience.Private -public class PrefixTreeEncoder implements CellOutputStream { +public class PrefixTreeEncoder implements ServerCellOutputStream { /**************** static ************************/ @@ -259,7 +261,7 @@ public class PrefixTreeEncoder implements CellOutputStream { * Add a Cell to the output stream but repeat the previous row. 
*/ //@Override - public void writeWithRepeatRow(Cell cell) { + public void writeWithRepeatRow(ServerCell cell) { ensurePerCellCapacities();//can we optimize away some of this? //save a relatively expensive row comparison, incrementing the row's counter instead @@ -269,9 +271,14 @@ public class PrefixTreeEncoder implements CellOutputStream { addAfterRowFamilyQualifier(cell); } - @Override public void write(Cell cell) { + // When encoding what we will get is ServerCell. We will never get to here. + // TODO + } + + @Override + public void write(ServerCell cell) { ensurePerCellCapacities(); rowTokenizer.addSorted(CellUtil.fillRowRange(cell, rowRange)); @@ -282,14 +289,14 @@ public class PrefixTreeEncoder implements CellOutputStream { } - private void addTagPart(Cell cell) { + private void addTagPart(ServerCell cell) { CellUtil.fillTagRange(cell, tagsRange); tagsDeduplicator.add(tagsRange); } /***************** internal add methods ************************/ - private void addAfterRowFamilyQualifier(Cell cell){ + private void addAfterRowFamilyQualifier(ServerCell cell){ // timestamps timestamps[totalCells] = cell.getTimestamp(); timestampEncoder.add(cell.getTimestamp()); @@ -327,14 +334,14 @@ public class PrefixTreeEncoder implements CellOutputStream { ++totalCells; } - private void addFamilyPart(Cell cell) { + private void addFamilyPart(ServerCell cell) { if (MULITPLE_FAMILIES_POSSIBLE || totalCells == 0) { CellUtil.fillFamilyRange(cell, familyRange); familyDeduplicator.add(familyRange); } } - private void addQualifierPart(Cell cell) { + private void addQualifierPart(ServerCell cell) { CellUtil.fillQualifierRange(cell, qualifierRange); qualifierDeduplicator.add(qualifierRange); } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java index 7e83457..1d976bf 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/scanner/CellSearcher.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.scanner; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; /** * Methods for seeking to a random {@link Cell} inside a sorted collection of cells. Indicates that @@ -40,7 +40,7 @@ public interface CellSearcher extends ReversibleCellScanner { * @param key position the CellScanner exactly on this key * @return true if the cell existed and getCurrentCell() holds a valid cell */ - boolean positionAt(Cell key); + boolean positionAt(ServerCell key); /** * Same as positionAt(..), but go to the extra effort of finding the previous key if there's no @@ -51,7 +51,7 @@ public interface CellSearcher extends ReversibleCellScanner { * BEFORE if on last cell before key
* BEFORE_FIRST if key was before the first cell in this scanner's scope */ - CellScannerPosition positionAtOrBefore(Cell key); + CellScannerPosition positionAtOrBefore(ServerCell key); /** * Same as positionAt(..), but go to the extra effort of finding the next key if there's no exact @@ -62,7 +62,7 @@ public interface CellSearcher extends ReversibleCellScanner { * AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope */ - CellScannerPosition positionAtOrAfter(Cell key); + CellScannerPosition positionAtOrAfter(ServerCell key); /** * Note: Added for backwards compatibility with @@ -75,7 +75,7 @@ public interface CellSearcher extends ReversibleCellScanner { * @param key position the CellScanner exactly on this key * @return true if getCurrent() holds a valid cell */ - boolean seekForwardTo(Cell key); + boolean seekForwardTo(ServerCell key); /** * Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no @@ -86,7 +86,7 @@ public interface CellSearcher extends ReversibleCellScanner { * AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope */ - CellScannerPosition seekForwardToOrBefore(Cell key); + CellScannerPosition seekForwardToOrBefore(ServerCell key); /** * Same as seekForwardTo(..), but go to the extra effort of finding the next key if there's no @@ -97,7 +97,7 @@ public interface CellSearcher extends ReversibleCellScanner { * AFTER if on first cell after key
* AFTER_LAST if key was after the last cell in this scanner's scope */ - CellScannerPosition seekForwardToOrAfter(Cell key); + CellScannerPosition seekForwardToOrAfter(ServerCell key); /** * Note: This may not be appropriate to have in the interface. Need to investigate. diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java index edea305..c140772 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java @@ -24,6 +24,7 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; @@ -91,7 +92,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{ private void testBetween1and2(CellSearcher searcher){ CellScannerPosition p;//reuse - Cell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v); + ServerCell betweenAAndAAA = new KeyValue(AA, cf, cq, ts-2, v); //test exact Assert.assertFalse(searcher.positionAt(betweenAAndAAA)); @@ -109,7 +110,7 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{ private void testBetween2and3(CellSearcher searcher){ CellScannerPosition p;//reuse - Cell betweenAAAndB = new KeyValue(AAA, cf, cq, ts-2, v); + ServerCell betweenAAAndB = new KeyValue(AAA, cf, cq, ts-2, v); //test exact Assert.assertFalse(searcher.positionAt(betweenAAAndB)); diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java index 6c3750a..dbc4b89 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java @@ -24,6 +24,7 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; @@ -79,7 +80,7 @@ public class TestRowDataSimple extends BaseTestRowData { Assert.assertTrue(searcher.positionAt(d.get(3))); Assert.assertTrue(CellComparator.equals(d.get(3), searcher.current())); - Cell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0); + ServerCell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0); // test exact Assert.assertFalse(searcher.positionAt(between4And5)); @@ -95,7 +96,7 @@ public class TestRowDataSimple extends BaseTestRowData { Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(5))); // test when key falls before first key in block - Cell beforeFirst = new KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0); + ServerCell beforeFirst = new 
KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0); Assert.assertFalse(searcher.positionAt(beforeFirst)); p = searcher.positionAtOrBefore(beforeFirst); Assert.assertEquals(CellScannerPosition.BEFORE_FIRST, p); @@ -105,7 +106,7 @@ public class TestRowDataSimple extends BaseTestRowData { Assert.assertEquals(d.get(0), searcher.current()); // test when key falls after last key in block - Cell afterLast = new KeyValue(Bytes.toBytes("z"), cf, cq0, ts, v0);// must be lower case z + ServerCell afterLast = new KeyValue(Bytes.toBytes("z"), cf, cq0, ts, v0);// must be lower case z Assert.assertFalse(searcher.positionAt(afterLast)); p = searcher.positionAtOrAfter(afterLast); Assert.assertEquals(CellScannerPosition.AFTER_LAST, p); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java index 313ecb8..ff9ab14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase; import java.io.IOException; +import java.io.OutputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; @@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.ClassSize; * parts, refer to the original Cell. */ @InterfaceAudience.Private -public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimestamp, HeapSize { +public class TagRewriteCell implements ServerCell, HeapSize { private Cell cell; private byte[] tags; @@ -39,8 +40,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta * @param tags the tags bytes. The array suppose to contain the tags bytes alone. */ public TagRewriteCell(Cell cell, byte[] tags) { - assert cell instanceof SettableSequenceId; - assert cell instanceof SettableTimestamp; this.cell = cell; this.tags = tags; // tag offset will be treated as 0 and length this.tags.length @@ -199,4 +198,19 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta // The incoming cell is supposed to be SettableSequenceId type. 
CellUtil.setSequenceId(cell, seqId); } + + @Override + public boolean hasArray() { + return true; + } + + @Override + public void oswrite(OutputStream out, boolean withTags) throws IOException { + // Write all the parts except the tags part + CellUtil.oswrite(cell, out, withTags); + if (withTags && this.tags != null) { + // Write tags if asked for + out.write(tags); + } + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java index a80a07e..899ba83 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java @@ -26,10 +26,10 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionScanner; @@ -44,7 +44,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner { private HRegion region; RegionScanner scanner; - List<Cell> values; + List<ServerCell> values; public ClientSideRegionScanner(Configuration conf, FileSystem fs, Path rootDir, HTableDescriptor htd, HRegionInfo hri, Scan scan, ScanMetrics scanMetrics) @@ -58,7 +58,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner { // create an internal region scanner this.scanner = region.getScanner(scan); - values = new ArrayList<Cell>(); + values = new ArrayList<ServerCell>(); if (scanMetrics == null) { initScanMetrics(scan); @@ -84,7 +84,7 @@ public class ClientSideRegionScanner extends AbstractClientScanner { Result result = Result.create(values); if (this.scanMetrics != null) { long resultSize = 0; - for (Cell cell : values) { + for (ServerCell cell : values) { resultSize += CellUtil.estimatedSerializedSizeOf(cell); } this.scanMetrics.countOfBytesInResults.addAndGet(resultSize); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java index 6c894a5..c4a916a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.protobuf.generated.CellProtos; /** @@ -58,6 +59,13 @@ public class MessageCodec implements Codec { CellProtos.Cell pbcell = builder.build(); pbcell.writeDelimitedTo(this.out); } + + @Override + public void write(ServerCell cell) throws IOException { + // TODO + // Same impl as above method. But check for hasArray and if false, deal with getXXXBuffer + // APIs.
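+ // Illustrative sketch only, not part of this patch: one possible shape for
+ // this method. It mirrors write(Cell) above when the cell is heap-backed and
+ // falls back to protobuf's ByteBuffer support otherwise. The
+ // getRowBuffer()-style accessor names below are assumptions about the
+ // eventual ServerCell API, not methods this patch defines.
+ //
+ // CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
+ // if (cell.hasArray()) {
+ //   builder.setRow(ByteStringer.wrap(cell.getRowArray(), cell.getRowOffset(),
+ //       cell.getRowLength()));
+ //   // ... family, qualifier, value as in write(Cell) above ...
+ // } else {
+ //   builder.setRow(ByteString.copyFrom(cell.getRowBuffer())); // assumed accessor
+ //   // ... likewise for the other parts ...
+ // }
+ // builder.build().writeDelimitedTo(this.out);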
+ } } static class MessageDecoder extends BaseDecoder { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java index b6f834e..2d493ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java @@ -26,9 +26,9 @@ import java.util.NavigableSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; @@ -82,7 +82,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { T temp; Scan scan = ProtobufUtil.toScan(request.getScan()); scanner = env.getRegion().getScanner(scan); - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); byte[] colFamily = scan.getFamilies()[0]; NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily); byte[] qualifier = null; @@ -137,7 +137,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { T temp; Scan scan = ProtobufUtil.toScan(request.getScan()); scanner = env.getRegion().getScanner(scan); - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); byte[] colFamily = scan.getFamilies()[0]; NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily); byte[] qualifier = null; @@ -197,7 +197,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { if (qualifiers != null && !qualifiers.isEmpty()) { qualifier = qualifiers.pollFirst(); } - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMoreRows = false; do { hasMoreRows = NextState.hasMoreValues(scanner.next(results)); @@ -237,7 +237,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { RpcCallback<AggregateResponse> done) { AggregateResponse response = null; long counter = 0l; - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); InternalScanner scanner = null; try { Scan scan = ProtobufUtil.toScan(request.getScan()); @@ -308,7 +308,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { if (qualifiers != null && !qualifiers.isEmpty()) { qualifier = qualifiers.pollFirst(); } - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMoreRows = false; @@ -368,7 +368,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { if (qualifiers != null && !qualifiers.isEmpty()) { qualifier = qualifiers.pollFirst(); } - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMoreRows = false; @@ -434,7 +434,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { // if weighted median is requested, get qualifier for the weight column weightQualifier = qualifiers.pollLast(); } - List<Cell> results = new ArrayList<Cell>(); + List<ServerCell> results = new ArrayList<ServerCell>(); boolean hasMoreRows = false; @@ -444,7 +444,7 @@ extends AggregateService implements CoprocessorService, Coprocessor { hasMoreRows = NextState.hasMoreValues(scanner.next(results)); int listSize = results.size(); for (int i = 0; i < listSize; i++) { - Cell kv = results.get(i); + ServerCell kv = results.get(i);
tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv))); if (weightQualifier != null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java index 9e0cb9b..0850afa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -232,12 +233,12 @@ public abstract class BaseRegionObserver implements RegionObserver { @Override public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, - final Get get, final List<Cell> results) throws IOException { + final Get get, final List<ServerCell> results) throws IOException { } @Override public void postGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, - final Get get, final List<Cell> results) throws IOException { + final Get get, final List<ServerCell> results) throws IOException { } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java index 7ee5a99..51153a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -481,7 +482,7 @@ public interface RegionObserver extends Coprocessor { * @throws IOException if an error occurred on the coprocessor */ void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get, - final List<Cell> result) + final List<ServerCell> result) throws IOException; /** @@ -495,7 +496,7 @@ public interface RegionObserver extends Coprocessor { * @throws IOException if an error occurred on the coprocessor */ void postGetOp(final ObserverContext<RegionCoprocessorEnvironment> c, final Get get, - final List<Cell> result) + final List<ServerCell> result) throws IOException; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java index 43bbab5..4384636 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java @@ -27,9 +27,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileScanner;
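The RegionObserver/BaseRegionObserver hunks above move the Get hooks to List<ServerCell>. A minimal sketch of a coprocessor written against that signature (illustrative only; ExampleObserver is hypothetical, and bypass() is the stock ObserverContext short-circuit):

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

public class ExampleObserver extends BaseRegionObserver {
  @Override
  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
      Get get, List<ServerCell> results) throws IOException {
    // Serve an empty result and skip the region's default Get processing.
    results.clear();
    e.bypass();
  }
}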
@@ -57,7 +57,7 @@ public class HalfStoreFileReader extends StoreFile.Reader { // i.e. empty column and a timestamp of LATEST_TIMESTAMP. protected final byte [] splitkey; - protected final Cell splitCell; + protected final ServerCell splitCell; private byte[] firstKey = null; @@ -148,7 +148,7 @@ public class HalfStoreFileReader extends StoreFile.Reader { return delegate.getValueString(); } - public Cell getKeyValue() { + public ServerCell getKeyValue() { if (atEnd) return null; return delegate.getKeyValue(); @@ -244,7 +244,7 @@ public class HalfStoreFileReader extends StoreFile.Reader { } @Override - public int seekTo(Cell key) throws IOException { + public int seekTo(ServerCell key) throws IOException { if (top) { if (getComparator().compareOnlyKeyPortion(key, splitCell) < 0) { return -1; @@ -266,7 +266,7 @@ public class HalfStoreFileReader extends StoreFile.Reader { } @Override - public int reseekTo(Cell key) throws IOException { + public int reseekTo(ServerCell key) throws IOException { // This function is identical to the corresponding seekTo function // except // that we call reseekTo (and not seekTo) on the delegate. @@ -294,9 +294,9 @@ public class HalfStoreFileReader extends StoreFile.Reader { } @Override - public boolean seekBefore(Cell key) throws IOException { + public boolean seekBefore(ServerCell key) throws IOException { if (top) { - Cell fk = new KeyValue.KeyOnlyKeyValue(getFirstKey(), 0, getFirstKey().length); + ServerCell fk = new KeyValue.KeyOnlyKeyValue(getFirstKey(), 0, getFirstKey().length); if (getComparator().compareOnlyKeyPortion(key, fk) <= 0) { return false; } @@ -319,7 +319,7 @@ public class HalfStoreFileReader extends StoreFile.Reader { } @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return null; } }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java index 52491e6..9c6eb77 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java @@ -29,11 +29,11 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -49,7 +49,7 @@ import org.apache.hadoop.io.Writable; public abstract class AbstractHFileWriter implements HFile.Writer { /** The Cell previously appended. Becomes the last cell in the file.*/ - protected Cell lastCell = null; + protected ServerCell lastCell = null; /** FileSystem stream to write into. */ protected FSDataOutputStream outputStream; @@ -85,7 +85,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer { * First cell in a block. * This reference should be short-lived since we write hfiles in a burst. */ - protected Cell firstCellInBlock = null; + protected ServerCell firstCellInBlock = null; /** May be null if we were passed a stream. 
*/ protected final Path path; @@ -191,7 +191,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer { * @return true if the key is duplicate * @throws IOException if the key or the key order is wrong */ - protected boolean checkKey(final Cell cell) throws IOException { + protected boolean checkKey(final ServerCell cell) throws IOException { boolean isDuplicateKey = false; if (cell == null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java index 4a5bb64..1b20182 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.hfile; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; /** * BlockWithScanInfo is wrapper class for HFileBlock with other attributes. These attributes are @@ -29,9 +29,9 @@ public class BlockWithScanInfo { * The first key in the next block following this one in the HFile. * If this key is unknown, this is reference-equal with HConstants.NO_NEXT_INDEXED_KEY */ - private final Cell nextIndexedKey; + private final ServerCell nextIndexedKey; - public BlockWithScanInfo(HFileBlock hFileBlock, Cell nextIndexedKey) { + public BlockWithScanInfo(HFileBlock hFileBlock, ServerCell nextIndexedKey) { this.hFileBlock = hFileBlock; this.nextIndexedKey = nextIndexedKey; } @@ -40,7 +40,7 @@ public class BlockWithScanInfo { return hFileBlock; } - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return nextIndexedKey; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 610fe7f..2629774 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -47,10 +47,10 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.compress.Compression; @@ -201,7 +201,7 @@ public class HFile { /** Add an element to the file info map. 
*/ void appendFileInfo(byte[] key, byte[] value) throws IOException; - void append(Cell cell) throws IOException; + void append(ServerCell cell) throws IOException; /** @return the path to this {@link HFile} */ Path getPath(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 4115941..3ee28a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -30,8 +30,8 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.ByteBufferInputStream; @@ -861,7 +861,7 @@ public class HFileBlock implements Cacheable { * @param cell * @throws IOException */ - public void write(Cell cell) throws IOException{ + public void write(ServerCell cell) throws IOException{ expectState(State.WRITING); this.unencodedDataSizeWritten += this.dataBlockEncoder.encode(cell, dataBlockEncodingCtx, this.userDataStream); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index 77266df..bcbb471 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -35,11 +35,11 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader; @@ -176,7 +176,7 @@ public class HFileBlockIndex { * @return reader a basic way to load blocks * @throws IOException */ - public HFileBlock seekToDataBlock(final Cell key, HFileBlock currentBlock, boolean cacheBlocks, + public HFileBlock seekToDataBlock(final ServerCell key, HFileBlock currentBlock, boolean cacheBlocks, boolean pread, boolean isCompaction, DataBlockEncoding expectedDataBlockEncoding) throws IOException { BlockWithScanInfo blockWithScanInfo = loadDataBlockWithScanInfo(key, currentBlock, @@ -208,7 +208,7 @@ public class HFileBlockIndex { * scan info such as nextIndexedKey. 
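 * <p>An illustrative call pattern for this method (a sketch, not code from
 * this patch; the variable names are hypothetical):
 * <pre>
 * BlockWithScanInfo info = indexReader.loadDataBlockWithScanInfo(key,
 *     currentBlock, cacheBlocks, pread, isCompaction, expectedEncoding);
 * HFileBlock block = info.getHFileBlock();
 * ServerCell nextIndexedKey = info.getNextIndexedKey();
 * </pre>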
* @throws IOException */ - public BlockWithScanInfo loadDataBlockWithScanInfo(Cell key, HFileBlock currentBlock, + public BlockWithScanInfo loadDataBlockWithScanInfo(ServerCell key, HFileBlock currentBlock, boolean cacheBlocks, boolean pread, boolean isCompaction, DataBlockEncoding expectedDataBlockEncoding) throws IOException { @@ -218,7 +218,7 @@ public class HFileBlockIndex { } // the next indexed key - Cell nextIndexedKey = null; + ServerCell nextIndexedKey = null; // Read the next-level (intermediate or leaf) index block. long currentOffset = blockOffsets[rootLevelIndex]; @@ -422,7 +422,7 @@ public class HFileBlockIndex { * @param key * Key to find */ - public int rootBlockContainingKey(final Cell key) { + public int rootBlockContainingKey(final ServerCell key) { int pos = Bytes.binarySearch(blockKeys, key, comparator); // pos is between -(blockKeys.length + 1) to blockKeys.length - 1, see // binarySearch's javadoc. @@ -506,7 +506,7 @@ public class HFileBlockIndex { * -1 otherwise * @throws IOException */ - static int binarySearchNonRootIndex(Cell key, ByteBuffer nonRootIndex, + static int binarySearchNonRootIndex(ServerCell key, ByteBuffer nonRootIndex, KVComparator comparator) { int numEntries = nonRootIndex.getInt(0); @@ -595,7 +595,7 @@ public class HFileBlockIndex { * return -1 in the case the given key is before the first key. * */ - static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, Cell key, + static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, ServerCell key, KVComparator comparator) { int entryIndex = binarySearchNonRootIndex(key, nonRootBlock, comparator); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java index 7ba74a2..f6ba4a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile; import java.io.DataOutputStream; import java.io.IOException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; @@ -55,7 +55,7 @@ public interface HFileDataBlockEncoder { * @return unencoded kv size * @throws IOException */ - int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java index 29bc292..80513d7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile; import java.io.DataOutputStream; import java.io.IOException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder; 
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; @@ -92,7 +92,7 @@ public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder { } @Override - public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException { return this.encoding.getEncoder().encode(cell, encodingCtx, out); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 7b92df9..a3d751e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -45,11 +45,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; @@ -313,9 +313,9 @@ public class HFilePrettyPrinter extends Configured implements Tool { private void scanKeysValues(Path file, KeyValueStatsCollector fileStats, HFileScanner scanner, byte[] row) throws IOException { - Cell pCell = null; + ServerCell pCell = null; do { - Cell cell = scanner.getKeyValue(); + ServerCell cell = scanner.getKeyValue(); if (row != null && row.length != 0) { int result = CellComparator.compareRows(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0, row.length); @@ -461,11 +461,11 @@ public class HFilePrettyPrinter extends Configured implements Tool { byte[] biggestRow = null; - private Cell prevCell = null; + private ServerCell prevCell = null; private long maxRowBytes = 0; private long curRowKeyLength; - public void collect(Cell cell) { + public void collect(ServerCell cell) { valLen.update(cell.getValueLength()); if (prevCell != null && KeyValue.COMPARATOR.compareRows(prevCell, cell) != 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java index c0e3e91..f4cc726 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java @@ -28,11 +28,11 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder; @@ -543,7 +543,7 @@ public class HFileReaderV2 extends AbstractHFileReader { protected 
HFileBlock block; @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return nextIndexedKey; } /** @@ -553,7 +553,7 @@ public class HFileReaderV2 extends AbstractHFileReader { * * If the nextIndexedKey is null, it means the nextIndexedKey has not been loaded yet. */ - protected Cell nextIndexedKey; + protected ServerCell nextIndexedKey; public AbstractScannerV2(HFileReaderV2 r, boolean cacheBlocks, final boolean pread, final boolean isCompaction) { @@ -562,8 +562,8 @@ public class HFileReaderV2 extends AbstractHFileReader { protected abstract ByteBuffer getFirstKeyInBlock(HFileBlock curBlock); - protected abstract int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey, - boolean rewind, Cell key, boolean seekBefore) throws IOException; + protected abstract int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey, + boolean rewind, ServerCell key, boolean seekBefore) throws IOException; @Override public int seekTo(byte[] key, int offset, int length) throws IOException { @@ -578,12 +578,12 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int seekTo(Cell key) throws IOException { + public int seekTo(ServerCell key) throws IOException { return seekTo(key, true); } @Override - public int reseekTo(Cell key) throws IOException { + public int reseekTo(ServerCell key) throws IOException { int compared; if (isSeeked()) { compared = compareKey(reader.getComparator(), key); @@ -627,7 +627,7 @@ public class HFileReaderV2 extends AbstractHFileReader { * using a faked index key * @throws IOException */ - public int seekTo(Cell key, boolean rewind) throws IOException { + public int seekTo(ServerCell key, boolean rewind) throws IOException { HFileBlockIndex.BlockIndexReader indexReader = reader.getDataBlockIndexReader(); BlockWithScanInfo blockWithScanInfo = indexReader.loadDataBlockWithScanInfo(key, block, cacheBlocks, pread, isCompaction, getEffectiveDataBlockEncoding()); @@ -645,7 +645,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public boolean seekBefore(Cell key) throws IOException { + public boolean seekBefore(ServerCell key) throws IOException { HFileBlock seekToBlock = reader.getDataBlockIndexReader().seekToDataBlock(key, block, cacheBlocks, pread, isCompaction, ((HFileReaderV2) reader).getEffectiveEncodingInCache(isCompaction)); @@ -674,7 +674,7 @@ public class HFileReaderV2 extends AbstractHFileReader { // TODO shortcut: seek forward in this block to the last key of the // block. 
} - Cell firstKeyInCurrentBlock = new KeyValue.KeyOnlyKeyValue(Bytes.getBytes(firstKey)); + ServerCell firstKeyInCurrentBlock = new KeyValue.KeyOnlyKeyValue(Bytes.getBytes(firstKey)); loadBlockAndSeekToKey(seekToBlock, firstKeyInCurrentBlock, true, key, true); return true; } @@ -726,7 +726,7 @@ public class HFileReaderV2 extends AbstractHFileReader { public abstract int compareKey(KVComparator comparator, byte[] key, int offset, int length); - public abstract int compareKey(KVComparator comparator, Cell kv); + public abstract int compareKey(KVComparator comparator, ServerCell kv); } /** @@ -742,7 +742,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public Cell getKeyValue() { + public ServerCell getKeyValue() { if (!isSeeked()) return null; @@ -879,8 +879,8 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey, - boolean rewind, Cell key, boolean seekBefore) throws IOException { + protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey, + boolean rewind, ServerCell key, boolean seekBefore) throws IOException { if (block == null || block.getOffset() != seekToBlock.getOffset()) { updateCurrBlock(seekToBlock); } else if (rewind) { @@ -969,7 +969,7 @@ public class HFileReaderV2 extends AbstractHFileReader { * less than the first key of current block(e.g. using a faked index * key) */ - protected int blockSeek(Cell key, boolean seekBefore) { + protected int blockSeek(ServerCell key, boolean seekBefore) { int klen, vlen; long memstoreTS = 0; int memstoreTSLen = 0; @@ -1072,7 +1072,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(KVComparator comparator, ServerCell key) { return comparator.compareOnlyKeyPortion( key, new KeyValue.KeyOnlyKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() @@ -1203,7 +1203,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public Cell getKeyValue() { + public ServerCell getKeyValue() { if (block == null) { return null; } @@ -1236,8 +1236,8 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, Cell nextIndexedKey, - boolean rewind, Cell key, boolean seekBefore) throws IOException { + protected int loadBlockAndSeekToKey(HFileBlock seekToBlock, ServerCell nextIndexedKey, + boolean rewind, ServerCell key, boolean seekBefore) throws IOException { if (block == null || block.getOffset() != seekToBlock.getOffset()) { updateCurrentBlock(seekToBlock); } else if (rewind) { @@ -1248,7 +1248,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(KVComparator comparator, ServerCell key) { return seeker.compareKey(comparator, key); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java index b28d8c1..e6d7c35 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java @@ -26,10 +26,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; 
-import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.crypto.Cipher; @@ -245,7 +245,7 @@ public class HFileReaderV3 extends HFileReaderV2 { * key) */ @Override - protected int blockSeek(Cell key, boolean seekBefore) { + protected int blockSeek(ServerCell key, boolean seekBefore) { int klen, vlen, tlen = 0; long memstoreTS = 0; int memstoreTSLen = 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java index deaa2c0..2aecfa0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; /** * A scanner allows you to position yourself within a HFile and @@ -59,7 +59,7 @@ public interface HFileScanner { @Deprecated int seekTo(byte[] key, int offset, int length) throws IOException; - int seekTo(Cell c) throws IOException; + int seekTo(ServerCell c) throws IOException; /** * Reseek to or just before the passed key. Similar to seekTo * except that this can be called even if the scanner is not at the beginning @@ -85,7 +85,7 @@ public interface HFileScanner { @Deprecated int reseekTo(byte[] key, int offset, int length) throws IOException; - int reseekTo(Cell c) throws IOException; + int reseekTo(ServerCell c) throws IOException; /** * Consider the key stream of all the keys in the file, * k[0] .. k[n], where there are n keys in the file. @@ -101,7 +101,7 @@ public interface HFileScanner { @Deprecated boolean seekBefore(byte[] key, int offset, int length) throws IOException; - boolean seekBefore(Cell kv) throws IOException; + boolean seekBefore(ServerCell kv) throws IOException; /** * Positions this scanner at the start of the file. * @return False if empty file; i.e. a call to next would return false and @@ -133,7 +133,7 @@ public interface HFileScanner { /** * @return Instance of {@link org.apache.hadoop.hbase.KeyValue}. */ - Cell getKeyValue(); + ServerCell getKeyValue(); /** * Convenience method to get a copy of the key as a string - interpreting the * bytes as UTF8. You must call {@link #seekTo(byte[])} before this method. 
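 * <p>Typical use of this scanner against the ServerCell-based seekTo above,
 * as a sketch (reader, startKey and process are hypothetical):
 * <pre>
 * HFileScanner scanner = reader.getScanner(cacheBlocks, pread);
 * if (scanner.seekTo(startKey) != -1) { // -1: key is before the first key
 *   do {
 *     ServerCell cell = scanner.getKeyValue();
 *     process(cell);
 *   } while (scanner.next());
 * }
 * </pre>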
@@ -160,5 +160,5 @@ /** * @return the next key in the index (the key to seek to the next block) */ - Cell getNextIndexedKey(); + ServerCell getNextIndexedKey(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index 28c4655..32b3afb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -31,10 +31,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable; @@ -80,7 +80,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { * The last(stop) Cell of the previous data block. * This reference should be short-lived since we write hfiles in a burst. */ - private Cell lastCellOfPreviousBlock = null; + private ServerCell lastCellOfPreviousBlock = null; /** Additional data items to be written to the "load-on-open" section. */ private List<BlockWritable> additionalLoadOnOpenData = @@ -162,7 +162,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { fsBlockWriter.writeHeaderAndData(outputStream); int onDiskSize = fsBlockWriter.getOnDiskSizeWithHeader(); - Cell indexEntry = + ServerCell indexEntry = CellComparator.getMidpoint(this.comparator, lastCellOfPreviousBlock, firstCellInBlock); dataBlockIndexWriter.addEntry(CellUtil.getCellKeySerializedAsKeyValueKey(indexEntry), lastDataBlockOffset, onDiskSize); @@ -252,7 +252,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { * @throws IOException */ @Override - public void append(final Cell cell) throws IOException { + public void append(final ServerCell cell) throws IOException { byte[] value = cell.getValueArray(); int voffset = cell.getValueOffset(); int vlength = cell.getValueLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java index 086395c..6fb167c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java @@ -26,9 +26,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; @@ -82,7 +82,7 @@ public class HFileWriterV3 extends HFileWriterV2 { * @throws IOException */ @Override - public void append(final Cell cell) throws IOException { + public void append(final ServerCell cell) throws IOException { // Currently get the complete arrays
super.append(cell); int tagsLength = cell.getTagsLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java index f75f6e9..ed51056 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java @@ -20,10 +20,10 @@ import java.io.DataOutputStream; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext; @@ -45,7 +45,7 @@ public class NoOpDataBlockEncoder implements HFileDataBlockEncoder { } @Override - public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) + public int encode(ServerCell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out) throws IOException { int klength = KeyValueUtil.keyLength(cell); int vlength = cell.getValueLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index 9c5b5af..11880bd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -110,7 +111,7 @@ public class Import extends Configured implements Tool { } if (filter == null || !filter.filterRowKey(row.get(), row.getOffset(), row.getLength())) { for (Cell kv : value.rawCells()) { - kv = filterKv(filter, kv); + kv = filterKv(filter, (ServerCell) kv); // skip if we filtered it out if (kv == null) continue; // TODO get rid of ensureKeyValue @@ -171,7 +172,7 @@ public class Import extends Configured implements Tool { protected void processKV(ImmutableBytesWritable key, Result result, Context context, Put put, Delete delete) throws IOException, InterruptedException { for (Cell kv : result.rawCells()) { - kv = filterKv(filter, kv); + kv = filterKv(filter, (ServerCell) kv); // skip if we filter it out if (kv == null) continue; @@ -312,7 +313,7 @@ public class Import extends Configured implements Tool { * @return null if the key should not be written, otherwise returns the original * {@link KeyValue} */ - public static Cell filterKv(Filter filter, Cell kv) throws IOException { + public static Cell filterKv(Filter filter, ServerCell kv) throws IOException { // apply the filter and skip this kv if the filter doesn't apply if (filter != null) { Filter.ReturnCode code = filter.filterKeyValue(kv); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java index 4c3ab50..774a019 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSkipListSet.java @@ -26,8 +26,8 @@ import java.util.SortedSet; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -45,96 +45,96 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; * get and set and won't throw ConcurrentModificationException when iterating. */ @InterfaceAudience.Private -public class CellSkipListSet implements NavigableSet<Cell> { - private final ConcurrentNavigableMap<Cell, Cell> delegatee; +public class CellSkipListSet implements NavigableSet<ServerCell> { + private final ConcurrentNavigableMap<ServerCell, ServerCell> delegatee; CellSkipListSet(final KeyValue.KVComparator c) { - this.delegatee = new ConcurrentSkipListMap<Cell, Cell>(c); + this.delegatee = new ConcurrentSkipListMap<ServerCell, ServerCell>(c); } - CellSkipListSet(final ConcurrentNavigableMap<Cell, Cell> m) { + CellSkipListSet(final ConcurrentNavigableMap<ServerCell, ServerCell> m) { this.delegatee = m; } - public Cell ceiling(Cell e) { + public ServerCell ceiling(ServerCell e) { throw new UnsupportedOperationException("Not implemented"); } - public Iterator<Cell> descendingIterator() { + public Iterator<ServerCell> descendingIterator() { return this.delegatee.descendingMap().values().iterator(); } - public NavigableSet<Cell> descendingSet() { + public NavigableSet<ServerCell> descendingSet() { throw new UnsupportedOperationException("Not implemented"); } - public Cell floor(Cell e) { + public ServerCell floor(ServerCell e) { throw new UnsupportedOperationException("Not implemented"); } - public SortedSet<Cell> headSet(final Cell toElement) { + public SortedSet<ServerCell> headSet(final ServerCell toElement) { return headSet(toElement, false); } - public NavigableSet<Cell> headSet(final Cell toElement, + public NavigableSet<ServerCell> headSet(final ServerCell toElement, boolean inclusive) { return new CellSkipListSet(this.delegatee.headMap(toElement, inclusive)); } - public Cell higher(Cell e) { + public ServerCell higher(ServerCell e) { throw new UnsupportedOperationException("Not implemented"); } - public Iterator<Cell> iterator() { + public Iterator<ServerCell> iterator() { return this.delegatee.values().iterator(); } - public Cell lower(Cell e) { + public ServerCell lower(ServerCell e) { throw new UnsupportedOperationException("Not implemented"); } - public Cell pollFirst() { + public ServerCell pollFirst() { throw new UnsupportedOperationException("Not implemented"); } - public Cell pollLast() { + public ServerCell pollLast() { throw new UnsupportedOperationException("Not implemented"); } - public SortedSet<Cell> subSet(Cell fromElement, Cell toElement) { + public SortedSet<ServerCell> subSet(ServerCell fromElement, ServerCell toElement) { throw new UnsupportedOperationException("Not implemented"); } - public NavigableSet<Cell> subSet(Cell fromElement, - boolean fromInclusive, Cell toElement, boolean toInclusive) { + public NavigableSet<ServerCell> subSet(ServerCell fromElement, + boolean fromInclusive, ServerCell toElement, boolean toInclusive) { throw new UnsupportedOperationException("Not implemented"); } - public SortedSet<Cell> tailSet(Cell fromElement) { + public SortedSet<ServerCell> tailSet(ServerCell fromElement) { return tailSet(fromElement, true); } - public NavigableSet<Cell> tailSet(Cell fromElement, boolean inclusive) { + public NavigableSet<ServerCell> tailSet(ServerCell fromElement, boolean inclusive) { return new CellSkipListSet(this.delegatee.tailMap(fromElement, inclusive)); } - public Comparator<? super Cell> comparator() { + public Comparator<? super ServerCell> comparator() { throw new UnsupportedOperationException("Not implemented"); } - public Cell first() { + public ServerCell first() { return this.delegatee.get(this.delegatee.firstKey()); } - public Cell last() { + public ServerCell last() { return this.delegatee.get(this.delegatee.lastKey()); } - public boolean add(Cell e) { + public boolean add(ServerCell e) { return this.delegatee.put(e, e) == null; } - public boolean addAll(Collection<? extends Cell> c) { + public boolean addAll(Collection<? extends ServerCell> c) { throw new UnsupportedOperationException("Not implemented"); } @@ -167,7 +167,7 @@ public class CellSkipListSet implements NavigableSet<Cell> { throw new UnsupportedOperationException("Not implemented"); } - public Cell get(Cell kv) { + public ServerCell get(ServerCell kv) { return this.delegatee.get(kv); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java index 3da0c0b..32e6a0b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java @@ -33,12 +33,12 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; @@ -131,10 +131,10 @@ public class DefaultMemStore implements MemStore { } void dump() { - for (Cell cell: this.cellSet) { + for (ServerCell cell: this.cellSet) { LOG.info(cell); } - for (Cell cell: this.snapshot) { + for (ServerCell cell: this.snapshot) { LOG.info(cell); } } @@ -221,9 +221,9 @@ public class DefaultMemStore implements MemStore { * passed-in KV */ @Override - public Pair<Long, Cell> add(Cell cell) { - Cell toAdd = maybeCloneWithAllocator(cell); - return new Pair<Long, Cell>(internalAdd(toAdd), toAdd); + public Pair<Long, ServerCell> add(ServerCell cell) { + ServerCell toAdd = maybeCloneWithAllocator(cell); + return new Pair<Long, ServerCell>(internalAdd(toAdd), toAdd); } @Override @@ -231,13 +231,13 @@ public class DefaultMemStore implements MemStore { return timeOfOldestEdit; } - private boolean addToCellSet(Cell e) { + private boolean addToCellSet(ServerCell e) { boolean b = this.cellSet.add(e); setOldestEditTimeToNow(); return b; } - private boolean removeFromCellSet(Cell e) { + private boolean removeFromCellSet(ServerCell e) { boolean b = this.cellSet.remove(e); setOldestEditTimeToNow(); return b; @@ -255,14 +255,14 @@ public class DefaultMemStore implements MemStore { * * Callers should ensure they already have the read lock taken */ - private long internalAdd(final Cell toAdd) { + private long internalAdd(final ServerCell toAdd) { long s = heapSizeChange(toAdd, addToCellSet(toAdd)); timeRangeTracker.includeTimestamp(toAdd); this.size.addAndGet(s); return s; } - private Cell maybeCloneWithAllocator(Cell cell) { + private
ServerCell maybeCloneWithAllocator(ServerCell cell) { if (allocator == null) { return cell; } @@ -290,13 +290,13 @@ * @param cell */ @Override - public void rollback(Cell cell) { + public void rollback(ServerCell cell) { // If the key is in the snapshot, delete it. We should not update // this.size, because that tracks the size of only the memstore and // not the snapshot. The flush of this snapshot to disk has not // yet started because Store.flush() waits for all rwcc transactions to // commit before starting the flush to disk. - Cell found = this.snapshot.get(cell); + ServerCell found = this.snapshot.get(cell); if (found != null && found.getSequenceId() == cell.getSequenceId()) { this.snapshot.remove(cell); long sz = heapSizeChange(cell, true); @@ -317,9 +317,9 @@ * @return approximate size of the passed key and value. */ @Override - public long delete(Cell deleteCell) { + public long delete(ServerCell deleteCell) { long s = 0; - Cell toAdd = maybeCloneWithAllocator(deleteCell); + ServerCell toAdd = maybeCloneWithAllocator(deleteCell); s += heapSizeChange(toAdd, addToCellSet(toAdd)); timeRangeTracker.includeTimestamp(toAdd); this.size.addAndGet(s); @@ -331,7 +331,7 @@ * first. * @return Next row or null if none found. */ - Cell getNextRow(final Cell cell) { + ServerCell getNextRow(final ServerCell cell) { return getLowest(getNextRow(cell, this.cellSet), getNextRow(cell, this.snapshot)); } @@ -340,7 +340,7 @@ * @param b * @return Return lowest of a or b or null if both a and b are null */ - private Cell getLowest(final Cell a, final Cell b) { + private ServerCell getLowest(final ServerCell a, final ServerCell b) { if (a == null) { return b; } @@ -356,12 +356,12 @@ * @return Next row or null if none found. If one found, will be a new * KeyValue -- can be destroyed by subsequent calls to this method. */ - private Cell getNextRow(final Cell key, - final NavigableSet<Cell> set) { - Cell result = null; - SortedSet<Cell> tail = key == null? set: set.tailSet(key); + private ServerCell getNextRow(final ServerCell key, + final NavigableSet<ServerCell> set) { + ServerCell result = null; + SortedSet<ServerCell> tail = key == null? set: set.tailSet(key); // Iterate until we fall into the next row; i.e. move off current row - for (Cell cell: tail) { + for (ServerCell cell: tail) { if (comparator.compareRows(cell, key) <= 0) continue; // Note: Not suppressing deletes or expired cells. Needs to be handled @@ -385,7 +385,7 @@ * @param set * @param state Accumulates deletes and candidates. */ - private void getRowKeyAtOrBefore(final NavigableSet<Cell> set, + private void getRowKeyAtOrBefore(final NavigableSet<ServerCell> set, final GetClosestRowBeforeTracker state) { if (set.isEmpty()) { return; @@ -406,13 +406,13 @@ * @param state * @return True if we found a candidate walking this row. */ - private boolean walkForwardInSingleRow(final SortedSet<Cell> set, - final Cell firstOnRow, final GetClosestRowBeforeTracker state) { + private boolean walkForwardInSingleRow(final SortedSet<ServerCell> set, + final ServerCell firstOnRow, final GetClosestRowBeforeTracker state) { boolean foundCandidate = false; - SortedSet<Cell> tail = set.tailSet(firstOnRow); + SortedSet<ServerCell> tail = set.tailSet(firstOnRow); if (tail.isEmpty()) return foundCandidate; - for (Iterator<Cell> i = tail.iterator(); i.hasNext();) { - Cell kv = i.next(); + for (Iterator<ServerCell> i = tail.iterator(); i.hasNext();) { + ServerCell kv = i.next(); // Did we go beyond the target row? If so break. if (state.isTooFar(kv, firstOnRow)) break; if (state.isExpired(kv)) { @@ -434,9 +434,9 @@ * @param set * @param state */ - private void getRowKeyBefore(NavigableSet<Cell> set, + private void getRowKeyBefore(NavigableSet<ServerCell> set, final GetClosestRowBeforeTracker state) { - Cell firstOnRow = state.getTargetKey(); + ServerCell firstOnRow = state.getTargetKey(); for (Member p = memberOfPreviousRow(set, state, firstOnRow); p != null; p = memberOfPreviousRow(p.set, state, firstOnRow)) { // Make sure we don't fall out of our table. @@ -474,11 +474,11 @@ byte[] qualifier, long newValue, long now) { - Cell firstCell = KeyValueUtil.createFirstOnRow(row, family, qualifier); + ServerCell firstCell = KeyValueUtil.createFirstOnRow(row, family, qualifier); // Is there a Cell in 'snapshot' with the same TS? If so, upgrade the timestamp a bit. - SortedSet<Cell> snSs = snapshot.tailSet(firstCell); + SortedSet<ServerCell> snSs = snapshot.tailSet(firstCell); if (!snSs.isEmpty()) { - Cell snc = snSs.first(); + ServerCell snc = snSs.first(); // is there a matching Cell in the snapshot? if (CellUtil.matchingRow(snc, firstCell) && CellUtil.matchingQualifier(snc, firstCell)) { if (snc.getTimestamp() == now) { @@ -494,8 +494,8 @@ // so we cant add the new Cell w/o knowing what's there already, but we also // want to take this chance to delete some cells. So two loops (sad) - SortedSet<Cell> ss = cellSet.tailSet(firstCell); - for (Cell cell : ss) { + SortedSet<ServerCell> ss = cellSet.tailSet(firstCell); + for (ServerCell cell : ss) { // if this isnt the row we are interested in, then bail: if (!CellUtil.matchingColumn(cell, family, qualifier) || !CellUtil.matchingRow(cell, firstCell)) { @@ -511,7 +511,7 @@ // create or update (upsert) a new Cell with // 'now' and a 0 memstoreTS == immediately visible - List<Cell> cells = new ArrayList<Cell>(1); + List<ServerCell> cells = new ArrayList<ServerCell>(1); cells.add(new KeyValue(row, family, qualifier, now, Bytes.toBytes(newValue))); return upsert(cells, 1L); } @@ -535,9 +535,9 @@ * @return change in memstore size */ @Override - public long upsert(Iterable<Cell> cells, long readpoint) { + public long upsert(Iterable<ServerCell> cells, long readpoint) { long size = 0; - for (Cell cell : cells) { + for (ServerCell cell : cells) { size += upsert(cell, readpoint); } return size; @@ -557,7 +557,7 @@ * @param cell * @return change in size of MemStore */ - private long upsert(Cell cell, long readpoint) { + private long upsert(ServerCell cell, long readpoint) { // Add the Cell to the MemStore // Use the internalAdd method here since we (a) already have a lock // and (b) cannot safely use the MSLAB here without potentially @@ -568,16 +568,16 @@ // Get the Cells for the row/family/qualifier regardless of timestamp. // For this case we want to clean up any other puts - Cell firstCell = KeyValueUtil.createFirstOnRow( + ServerCell firstCell = KeyValueUtil.createFirstOnRow( cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()); - SortedSet<Cell> ss = cellSet.tailSet(firstCell); - Iterator<Cell> it = ss.iterator(); + SortedSet<ServerCell> ss = cellSet.tailSet(firstCell); + Iterator<ServerCell> it = ss.iterator(); // versions visible to oldest scanner int versionsVisible = 0; while ( it.hasNext() ) { - Cell cur = it.next(); + ServerCell cur = it.next(); if (cell == cur) { // ignore the one just put in @@ -615,9 +615,9 @@ * found in. Include set because it is carrying context. */ private static class Member { - final Cell cell; - final NavigableSet<Cell> set; - Member(final NavigableSet<Cell> s, final Cell kv) { + final ServerCell cell; + final NavigableSet<ServerCell> set; + Member(final NavigableSet<ServerCell> s, final ServerCell kv) { this.cell = kv; this.set = s; } @@ -631,12 +631,12 @@ * member in.
* @return Null or member of row previous to firstOnRow */ - private Member memberOfPreviousRow(NavigableSet set, - final GetClosestRowBeforeTracker state, final Cell firstOnRow) { - NavigableSet head = set.headSet(firstOnRow, false); + private Member memberOfPreviousRow(NavigableSet set, + final GetClosestRowBeforeTracker state, final ServerCell firstOnRow) { + NavigableSet head = set.headSet(firstOnRow, false); if (head.isEmpty()) return null; - for (Iterator i = head.descendingIterator(); i.hasNext();) { - Cell found = i.next(); + for (Iterator i = head.descendingIterator(); i.hasNext();) { + ServerCell found = i.next(); if (state.isExpired(found)) { i.remove(); continue; @@ -675,23 +675,23 @@ public class DefaultMemStore implements MemStore { */ protected class MemStoreScanner extends NonLazyKeyValueScanner { // Next row information for either cellSet or snapshot - private Cell cellSetNextRow = null; - private Cell snapshotNextRow = null; + private ServerCell cellSetNextRow = null; + private ServerCell snapshotNextRow = null; // last iterated Cells for cellSet and snapshot (to restore iterator state after reseek) - private Cell cellSetItRow = null; - private Cell snapshotItRow = null; + private ServerCell cellSetItRow = null; + private ServerCell snapshotItRow = null; // iterator based scanning. - private Iterator cellSetIt; - private Iterator snapshotIt; + private Iterator cellSetIt; + private Iterator snapshotIt; // The cellSet and snapshot at the time of creating this scanner private CellSkipListSet cellSetAtCreation; private CellSkipListSet snapshotAtCreation; // the pre-calculated Cell to be returned by peek() or next() - private Cell theNext; + private ServerCell theNext; // The allocator and snapshot allocator at the time of creating this scanner volatile MemStoreLAB allocatorAtCreation; @@ -748,9 +748,9 @@ public class DefaultMemStore implements MemStore { * @param it * @return Next Cell */ - private Cell getNext(Iterator it) { - Cell startCell = theNext; - Cell v = null; + private ServerCell getNext(Iterator it) { + ServerCell startCell = theNext; + ServerCell v = null; try { while (it.hasNext()) { v = it.next(); @@ -784,7 +784,7 @@ public class DefaultMemStore implements MemStore { * @return false if the key is null or if there is no data */ @Override - public synchronized boolean seek(Cell key) { + public synchronized boolean seek(ServerCell key) { if (key == null) { close(); return false; @@ -803,7 +803,7 @@ public class DefaultMemStore implements MemStore { /** * (Re)initialize the iterators after a seek or a reseek. */ - private synchronized boolean seekInSubLists(Cell key){ + private synchronized boolean seekInSubLists(ServerCell key){ cellSetNextRow = getNext(cellSetIt); snapshotNextRow = getNext(snapshotIt); @@ -821,7 +821,7 @@ public class DefaultMemStore implements MemStore { * @return true if there is at least one KV to read, false otherwise */ @Override - public synchronized boolean reseek(Cell key) { + public synchronized boolean reseek(ServerCell key) { /* See HBASE-4195 & HBASE-3855 & HBASE-6591 for the background on this implementation. This code is executed concurrently with flush and puts, without locks. 
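Note on the scanner hunks above and below: MemStoreScanner is a two-way merge. getNext(Iterator<ServerCell>) pre-fetches one candidate each from the cellSet and snapshot iterators, and peek()/next() always surface the lower of the two, advancing only the source that was consumed. A minimal standalone sketch of that pattern, with illustrative names (TwoSourceMerge is not part of this patch):

import java.util.Comparator;
import java.util.Iterator;

// Illustrative two-way merge over two sorted cell sources, mirroring how
// MemStoreScanner picks the lower of cellSetNextRow and snapshotNextRow.
final class TwoSourceMerge<C> {
  private final Iterator<C> a, b;  // e.g. the cellSet and snapshot iterators
  private final Comparator<C> cmp; // e.g. the memstore comparator
  private C nextA, nextB;          // pre-fetched heads, like cellSetNextRow/snapshotNextRow

  TwoSourceMerge(Iterator<C> a, Iterator<C> b, Comparator<C> cmp) {
    this.a = a; this.b = b; this.cmp = cmp;
    nextA = a.hasNext() ? a.next() : null;
    nextB = b.hasNext() ? b.next() : null;
  }

  /** Like peek(): the lower of the two heads, or null when both are drained. */
  C peek() {
    if (nextA == null) return nextB;
    if (nextB == null) return nextA;
    return cmp.compare(nextA, nextB) <= 0 ? nextA : nextB;
  }

  /** Like next(): return the lower head and advance only the source it came from. */
  C next() {
    C ret = peek();
    if (ret == null) return null;
    if (ret == nextA) nextA = a.hasNext() ? a.next() : null;
    else nextB = b.hasNext() ? b.next() : null;
    return ret;
  }
}

Instantiated with the memstore's comparator and the two iterators, peek()/next() here behave like the scanner's peek()/next() in the hunks that follow.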
@@ -842,18 +842,18 @@ public class DefaultMemStore implements MemStore { @Override - public synchronized Cell peek() { + public synchronized ServerCell peek() { //DebugPrint.println(" MS@" + hashCode() + " peek = " + getLowest()); return theNext; } @Override - public synchronized Cell next() { + public synchronized ServerCell next() { if (theNext == null) { return null; } - final Cell ret = theNext; + final ServerCell ret = theNext; // Advance one of the iterators if (theNext == cellSetNextRow) { @@ -876,7 +876,7 @@ public class DefaultMemStore implements MemStore { * This uses comparator.compare() to compare the KeyValue using the memstore * comparator. */ - private Cell getLowest(Cell first, Cell second) { + private ServerCell getLowest(ServerCell first, ServerCell second) { if (first == null && second == null) { return null; } @@ -892,7 +892,7 @@ public class DefaultMemStore implements MemStore { * This uses comparator.compare() to compare the Cell using the memstore * comparator. */ - private Cell getHighest(Cell first, Cell second) { + private ServerCell getHighest(ServerCell first, ServerCell second) { if (first == null && second == null) { return null; } @@ -944,7 +944,7 @@ public class DefaultMemStore implements MemStore { * the scanner to the previous row of given key */ @Override - public synchronized boolean backwardSeek(Cell key) { + public synchronized boolean backwardSeek(ServerCell key) { seek(key); if (peek() == null || comparator.compareRows(peek(), key) > 0) { return seekToPreviousRow(key); @@ -958,21 +958,21 @@ public class DefaultMemStore implements MemStore { * specified key, then seek to the first KeyValue of previous row */ @Override - public synchronized boolean seekToPreviousRow(Cell key) { - Cell firstKeyOnRow = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(), + public synchronized boolean seekToPreviousRow(ServerCell key) { + ServerCell firstKeyOnRow = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(), key.getRowLength()); - SortedSet cellHead = cellSetAtCreation.headSet(firstKeyOnRow); - Cell cellSetBeforeRow = cellHead.isEmpty() ? null : cellHead.last(); - SortedSet snapshotHead = snapshotAtCreation + SortedSet cellHead = cellSetAtCreation.headSet(firstKeyOnRow); + ServerCell cellSetBeforeRow = cellHead.isEmpty() ? null : cellHead.last(); + SortedSet snapshotHead = snapshotAtCreation .headSet(firstKeyOnRow); - Cell snapshotBeforeRow = snapshotHead.isEmpty() ? null : snapshotHead + ServerCell snapshotBeforeRow = snapshotHead.isEmpty() ? null : snapshotHead .last(); - Cell lastCellBeforeRow = getHighest(cellSetBeforeRow, snapshotBeforeRow); + ServerCell lastCellBeforeRow = getHighest(cellSetBeforeRow, snapshotBeforeRow); if (lastCellBeforeRow == null) { theNext = null; return false; } - Cell firstKeyOnPreviousRow = KeyValueUtil.createFirstOnRow(lastCellBeforeRow.getRowArray(), + ServerCell firstKeyOnPreviousRow = KeyValueUtil.createFirstOnRow(lastCellBeforeRow.getRowArray(), lastCellBeforeRow.getRowOffset(), lastCellBeforeRow.getRowLength()); this.stopSkippingCellsIfNextRow = true; seek(firstKeyOnPreviousRow); @@ -986,15 +986,15 @@ public class DefaultMemStore implements MemStore { @Override public synchronized boolean seekToLastRow() { - Cell first = cellSetAtCreation.isEmpty() ? null : cellSetAtCreation + ServerCell first = cellSetAtCreation.isEmpty() ? null : cellSetAtCreation .last(); - Cell second = snapshotAtCreation.isEmpty() ? null + ServerCell second = snapshotAtCreation.isEmpty() ? 
null : snapshotAtCreation.last(); - Cell higherCell = getHighest(first, second); + ServerCell higherCell = getHighest(first, second); if (higherCell == null) { return false; } - Cell firstCellOnLastRow = KeyValueUtil.createFirstOnRow(higherCell.getRowArray(), + ServerCell firstCellOnLastRow = KeyValueUtil.createFirstOnRow(higherCell.getRowArray(), higherCell.getRowOffset(), higherCell.getRowLength()); if (seek(firstCellOnLastRow)) { return true; @@ -1019,7 +1019,7 @@ public class DefaultMemStore implements MemStore { * @param notpresent True if the cell was NOT present in the set. * @return Size */ - static long heapSizeChange(final Cell cell, final boolean notpresent) { + static long heapSizeChange(final ServerCell cell, final boolean notpresent) { return notpresent ? ClassSize.align(ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + CellUtil.estimatedHeapSizeOf(cell)) : 0; } @@ -1063,19 +1063,19 @@ public class DefaultMemStore implements MemStore { byte [] empty = new byte[0]; for (int i = 0; i < count; i++) { // Give each its own ts - Pair ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty)); + Pair ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty)); size += ret.getFirst(); } LOG.info("memstore1 estimated size=" + size); for (int i = 0; i < count; i++) { - Pair ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty)); + Pair ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty)); size += ret.getFirst(); } LOG.info("memstore1 estimated size (2nd loading of same data)=" + size); // Make a variably sized memstore. DefaultMemStore memstore2 = new DefaultMemStore(); for (int i = 0; i < count; i++) { - Pair ret = memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, + Pair ret = memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, new byte[i])); size += ret.getFirst(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java index 8305b99..cfc2e91 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java @@ -28,9 +28,9 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration; @@ -110,7 +110,7 @@ class DefaultStoreFileManager implements StoreFileManager { @Override public Iterator updateCandidateFilesForRowKeyBefore( - Iterator candidateFiles, final KeyValue targetKey, final Cell candidate) { + Iterator candidateFiles, final KeyValue targetKey, final ServerCell candidate) { // Default store has nothing useful to do here. 
     // TODO: move this comment when implementing Level:
     // Level store can trim the list by range, removing all the files which cannot have
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
index 70254fe..bffd104 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
 
 /**
  * This interface is used for the tracking and enforcement of Deletes
  *
@@ -40,7 +40,7 @@ public interface DeleteTracker {
    * This is called when a Delete is encountered in a StoreFile.
    * @param cell - the delete cell
    */
-  void add(Cell cell);
+  void add(ServerCell cell);
 
  /**
   * Check if the specified cell buffer has been deleted by a previously
@@ -48,7 +48,7 @@
    * @param cell - current cell to check if deleted by a previously seen delete
    * @return deleteResult The result tells whether the KeyValue is deleted and why
    */
-  DeleteResult isDeleted(Cell cell);
+  DeleteResult isDeleted(ServerCell cell);
 
  /**
   * @return true if there are no current delete, false otherwise
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
index 4d22c0e..68bee57 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
@@ -24,12 +24,12 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -44,7 +44,7 @@ class GetClosestRowBeforeTracker {
   // Any cell w/ a ts older than this is expired.
   private final long now;
   private final long oldestUnexpiredTs;
-  private Cell candidate = null;
+  private ServerCell candidate = null;
   private final KVComparator kvcomparator;
   // Flag for whether we're doing getclosest on a metaregion.
   private final boolean metaregion;
@@ -53,7 +53,7 @@ class GetClosestRowBeforeTracker {
   private final int tablenamePlusDelimiterLength;
 
   // Deletes keyed by row.  Comparator compares on row portion of KeyValue only.
-  private final NavigableMap<Cell, NavigableSet<Cell>> deletes;
+  private final NavigableMap<ServerCell, NavigableSet<ServerCell>> deletes;
 
   /**
    * @param c
@@ -79,17 +79,17 @@ class GetClosestRowBeforeTracker {
     this.now = System.currentTimeMillis();
     this.oldestUnexpiredTs = now - ttl;
     this.kvcomparator = c;
-    this.deletes = new TreeMap<Cell, NavigableSet<Cell>>(new CellComparator.RowComparator());
+    this.deletes = new TreeMap<ServerCell, NavigableSet<ServerCell>>(new CellComparator.RowComparator());
   }
 
   /*
    * Add the specified KeyValue to the list of deletes.
* @param kv */ - private void addDelete(final Cell kv) { - NavigableSet rowdeletes = this.deletes.get(kv); + private void addDelete(final ServerCell kv) { + NavigableSet rowdeletes = this.deletes.get(kv); if (rowdeletes == null) { - rowdeletes = new TreeSet(this.kvcomparator); + rowdeletes = new TreeSet(this.kvcomparator); this.deletes.put(kv, rowdeletes); } rowdeletes.add(kv); @@ -99,7 +99,7 @@ class GetClosestRowBeforeTracker { * @param kv Adds candidate if nearer the target than previous candidate. * @return True if updated candidate. */ - private boolean addCandidate(final Cell kv) { + private boolean addCandidate(final ServerCell kv) { if (!isDeleted(kv) && isBetterCandidate(kv)) { this.candidate = kv; return true; @@ -107,7 +107,7 @@ class GetClosestRowBeforeTracker { return false; } - boolean isBetterCandidate(final Cell contender) { + boolean isBetterCandidate(final ServerCell contender) { return this.candidate == null || (this.kvcomparator.compareRows(this.candidate, contender) < 0 && this.kvcomparator.compareRows(contender, this.targetkey) <= 0); @@ -119,9 +119,9 @@ class GetClosestRowBeforeTracker { * @param kv * @return true is the specified KeyValue is deleted, false if not */ - private boolean isDeleted(final Cell kv) { + private boolean isDeleted(final ServerCell kv) { if (this.deletes.isEmpty()) return false; - NavigableSet rowdeletes = this.deletes.get(kv); + NavigableSet rowdeletes = this.deletes.get(kv); if (rowdeletes == null || rowdeletes.isEmpty()) return false; return isDeleted(kv, rowdeletes); } @@ -133,9 +133,9 @@ class GetClosestRowBeforeTracker { * @param ds * @return True is the specified KeyValue is deleted, false if not */ - public boolean isDeleted(final Cell kv, final NavigableSet ds) { + public boolean isDeleted(final ServerCell kv, final NavigableSet ds) { if (deletes == null || deletes.isEmpty()) return false; - for (Cell d: ds) { + for (ServerCell d: ds) { long kvts = kv.getTimestamp(); long dts = d.getTimestamp(); if (CellUtil.isDeleteFamily(d)) { @@ -170,7 +170,7 @@ class GetClosestRowBeforeTracker { * @param cell * @return true if the cell is expired */ - public boolean isExpired(final Cell cell) { + public boolean isExpired(final ServerCell cell) { return cell.getTimestamp() < this.oldestUnexpiredTs || HStore.isCellTTLExpired(cell, this.oldestUnexpiredTs, this.now); } @@ -183,7 +183,7 @@ class GetClosestRowBeforeTracker { * @param kv * @return True if we removed k from candidates. */ - boolean handleDeletes(final Cell kv) { + boolean handleDeletes(final ServerCell kv) { addDelete(kv); boolean deleted = false; if (!hasCandidate()) return deleted; @@ -199,7 +199,7 @@ class GetClosestRowBeforeTracker { * @param kv * @return True if we added a candidate */ - boolean handle(final Cell kv) { + boolean handle(final ServerCell kv) { if (CellUtil.isDelete(kv)) { handleDeletes(kv); return false; @@ -217,7 +217,7 @@ class GetClosestRowBeforeTracker { /** * @return Best candidate or null. */ - public Cell getCandidate() { + public ServerCell getCandidate() { return this.candidate; } @@ -230,11 +230,11 @@ class GetClosestRowBeforeTracker { * @param firstOnRow on row kv. * @return True if we went too far, past the target key. */ - boolean isTooFar(final Cell kv, final Cell firstOnRow) { + boolean isTooFar(final ServerCell kv, final ServerCell firstOnRow) { return this.kvcomparator.compareRows(kv, firstOnRow) > 0; } - boolean isTargetTable(final Cell kv) { + boolean isTargetTable(final ServerCell kv) { if (!metaregion) return true; // Compare start of keys row. 
Compare including delimiter. Saves having // to calculate where tablename ends in the candidate kv. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 83127d2..fe03bd3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -86,6 +86,7 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionTooBusyException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; @@ -2552,7 +2553,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi void updateDeleteLatestVersionTimeStamp(Cell cell, Get get, int count, byte[] byteNow) throws IOException { - List result = get(get, false); + List result = get(get, false); if (result.size() < count) { // Nothing to delete @@ -3050,7 +3051,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi WALEdit fromCP = batchOp.walEditsFromCoprocessors[i]; if (fromCP != null) { for (Cell cell : fromCP.getCells()) { - walEdit.add(cell); + walEdit.add((ServerCell) cell); } } addFamilyMapToWALEdit(familyMaps[i], walEdit); @@ -3255,7 +3256,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi return processed; } } - List result = get(get, false); + List result = get(get, false); boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0; @@ -3335,7 +3336,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // wait for all previous transactions to complete (with lock held) mvcc.waitForPreviousTransactionsComplete(); try { - List result = get(get, false); + List result = get(get, false); boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0; @@ -3577,7 +3578,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi for (int i=0; i < listSize; i++) { Cell cell = cells.get(i); CellUtil.setSequenceId(cell, mvccNum); - Pair ret = store.add(cell); + Pair ret = store.add((ServerCell)cell); size += ret.getFirst(); memstoreCells.add(ret.getSecond()); if(isInReplay) { @@ -3601,7 +3602,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi for (Cell cell : memstoreCells) { byte[] family = CellUtil.cloneFamily(cell); Store store = getStore(family); - store.rollback(cell); + store.rollback((ServerCell)cell); kvsRolledback++; } LOG.debug("rollbackMemstore rolled back " + kvsRolledback); @@ -3673,7 +3674,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi int listSize = edits.size(); for (int i=0; i < listSize; i++) { Cell cell = edits.get(i); - walEdit.add(cell); + walEdit.add((ServerCell) cell); } } } @@ -4812,7 +4813,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi * @return True if we should flush. 
*/ protected boolean restoreEdit(final Store s, final Cell cell) { - long kvSize = s.add(cell).getFirst(); + long kvSize = s.add((ServerCell) cell).getFirst(); if (this.rsAccounting != null) { rsAccounting.addAndGetRegionReplayEditsSize(getRegionInfo().getRegionName(), kvSize); } @@ -5171,7 +5172,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi /** * If the joined heap data gathering is interrupted due to scan limits, this will * contain the row for which we are populating the values.*/ - protected Cell joinedContinuationRow = null; + protected ServerCell joinedContinuationRow = null; protected final byte[] stopRow; private final FilterWrapper filter; private int batch; @@ -5273,19 +5274,19 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } @Override - public NextState next(List outResults) + public NextState next(List outResults) throws IOException { // apply the batching limit by default return next(outResults, batch); } @Override - public NextState next(List outResults, int limit) throws IOException { + public NextState next(List outResults, int limit) throws IOException { return next(outResults, limit, -1); } @Override - public synchronized NextState next(List outResults, int limit, long remainingResultSize) + public synchronized NextState next(List outResults, int limit, long remainingResultSize) throws IOException { if (this.filterClosed) { throw new UnknownScannerException("Scanner was closed (timed out?) " + @@ -5302,18 +5303,18 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } @Override - public NextState nextRaw(List outResults) throws IOException { + public NextState nextRaw(List outResults) throws IOException { return nextRaw(outResults, batch); } @Override - public NextState nextRaw(List outResults, int limit) + public NextState nextRaw(List outResults, int limit) throws IOException { return nextRaw(outResults, limit, -1); } @Override - public NextState nextRaw(List outResults, int batchLimit, long remainingResultSize) + public NextState nextRaw(List outResults, int batchLimit, long remainingResultSize) throws IOException { if (storeHeap == null) { // scanner is closed @@ -5325,7 +5326,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // to handle scan or get operation. 
state = nextInternal(outResults, batchLimit, remainingResultSize); } else { - List tmpList = new ArrayList(); + List tmpList = new ArrayList(); state = nextInternal(tmpList, batchLimit, remainingResultSize); outResults.addAll(tmpList); } @@ -5350,7 +5351,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi * @return the state the joinedHeap returned on the call to * {@link KeyValueHeap#next(List, int, long)} */ - private NextState populateFromJoinedHeap(List results, int limit, long resultSize) + private NextState populateFromJoinedHeap(List results, int limit, long resultSize) throws IOException { assert joinedContinuationRow != null; NextState state = @@ -5379,12 +5380,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi * @param length length for currentRow * @return state of last call to {@link KeyValueHeap#next()} */ - private NextState populateResult(List results, KeyValueHeap heap, int batchLimit, + private NextState populateResult(List results, KeyValueHeap heap, int batchLimit, long remainingResultSize, byte[] currentRow, int offset, short length) throws IOException { - Cell nextKv; + ServerCell nextKv; boolean moreCellsInRow = false; long accumulatedResultSize = 0; - List tmpResults = new ArrayList(); + List tmpResults = new ArrayList(); do { int remainingBatchLimit = batchLimit - results.size(); NextState heapState = @@ -5423,7 +5424,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi * @param length * @return true When there are more cells in the row to be read */ - private boolean moreCellsInRow(final Cell nextKv, byte[] currentRow, int offset, + private boolean moreCellsInRow(final ServerCell nextKv, byte[] currentRow, int offset, short length) { return nextKv != null && CellUtil.matchingRow(nextKv, currentRow, offset, length); } @@ -5436,7 +5437,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi * @param state The state returned from the scanner that generated these results * @return aggregate size of results */ - private long calculateResultSize(List results, NextState state) { + private long calculateResultSize(List results, NextState state) { if (results == null || results.isEmpty()) return 0; // In general, the state should contain the estimate because the result size used to @@ -5445,7 +5446,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (state != null && state.hasResultSizeEstimate()) return state.getResultSize(); long size = 0; - for (Cell c : results) { + for (ServerCell c : results) { size += CellUtil.estimatedHeapSizeOfWithoutTags(c); } @@ -5464,7 +5465,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi return this.filter != null && this.filter.filterAllRemaining(); } - private NextState nextInternal(List results, int batchLimit, long remainingResultSize) + private NextState nextInternal(List results, int batchLimit, long remainingResultSize) throws IOException { if (!results.isEmpty()) { throw new IllegalArgumentException("First parameter should be an empty list"); @@ -5493,7 +5494,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } // Let's see what we have in the storeHeap. 
- Cell current = this.storeHeap.peek(); + ServerCell current = this.storeHeap.peek(); byte[] currentRow = null; int offset = 0; @@ -5570,7 +5571,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // We hit the size limit. return NextState.makeState(NextState.State.SIZE_LIMIT_REACHED, resultSize); } - Cell nextKv = this.storeHeap.peek(); + ServerCell nextKv = this.storeHeap.peek(); stopRow = nextKv == null || isStopRow(nextKv.getRowArray(), nextKv.getRowOffset(), nextKv.getRowLength()); // save that the row was empty before filters applied to it. @@ -5599,7 +5600,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // These values are not needed for filter to work, so we postpone their // fetch to (possibly) reduce amount of data loads from disk. if (this.joinedHeap != null) { - Cell nextJoinedKv = joinedHeap.peek(); + ServerCell nextJoinedKv = joinedHeap.peek(); // If joinedHeap is pointing to some other row, try to seek to a correct one. boolean mayHaveData = (nextJoinedKv != null && CellUtil.matchingRow(nextJoinedKv, currentRow, offset, length)) @@ -5675,7 +5676,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi protected boolean nextRow(byte [] currentRow, int offset, short length) throws IOException { assert this.joinedContinuationRow == null: "Trying to go to next row during joinedHeap read."; - Cell next; + ServerCell next; while ((next = this.storeHeap.peek()) != null && CellUtil.matchingRow(next, currentRow, offset, length)) { this.storeHeap.next(MOCKED_LIST); @@ -6292,15 +6293,15 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi get.addFamily(family); } } - List results = get(get, true); + List results = get(get, true); boolean stale = this.getRegionInfo().getReplicaId() != 0; return Result.create(results, get.isCheckExistenceOnly() ? 
!results.isEmpty() : null, stale); } @Override - public List get(Get get, boolean withCoprocessor) throws IOException { + public List get(Get get, boolean withCoprocessor) throws IOException { - List results = new ArrayList(); + List results = new ArrayList(); // pre-get CP hook if (withCoprocessor && (coprocessorHost != null)) { @@ -6328,7 +6329,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // do after lock if (this.metricsRegion != null) { long totalSize = 0L; - for (Cell cell : results) { + for (ServerCell cell : results) { totalSize += CellUtil.estimatedSerializedSizeOf(cell); } this.metricsRegion.updateGet(totalSize); @@ -6478,7 +6479,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi checkFamily(CellUtil.cloneFamily(cell)); // unreachable } - Pair ret = store.add(cell); + Pair ret = store.add((ServerCell)cell); addedSize += ret.getFirst(); memstoreCells.add(ret.getSecond()); } @@ -6523,7 +6524,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi processor.getRowsToLock().iterator().next()) + "..."); for (Mutation m : mutations) { for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { - Cell cell = cellScanner.current(); + ServerCell cell = (ServerCell) cellScanner.current(); getStore(cell).rollback(cell); } } @@ -6618,7 +6619,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi boolean writeToWAL = durability != Durability.SKIP_WAL; WALEdit walEdits = null; List allKVs = new ArrayList(append.size()); - Map> tempMemstore = new HashMap>(); + Map> tempMemstore = new HashMap>(); long size = 0; long txid = 0; @@ -6655,7 +6656,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi for (Map.Entry> family : append.getFamilyCellMap().entrySet()) { Store store = stores.get(family.getKey()); - List kvs = new ArrayList(family.getValue().size()); + List kvs = new ArrayList(family.getValue().size()); // Sort the cells so that they match the order that they // appear in the Get results. 
Otherwise, we won't be able to @@ -6667,7 +6668,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi for (Cell cell : family.getValue()) { get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell)); } - List results = get(get, false); + List results = get(get, false); // Iterate the input columns and update existing values if they were // found, otherwise add new column initialized to the append value @@ -6676,8 +6677,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // Would be nice if KeyValue had scatter/gather logic int idx = 0; for (Cell cell : family.getValue()) { - Cell newCell; - Cell oldCell = null; + ServerCell newCell; + ServerCell oldCell = null; if (idx < results.size() && CellUtil.matchingQualifier(results.get(idx), cell)) { oldCell = results.get(idx); @@ -6759,14 +6760,14 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), newTags); } else { - newCell = cell; + newCell = (ServerCell) cell; } } CellUtil.setSequenceId(newCell, mvccNum); // Give coprocessors a chance to update the new cell if (coprocessorHost != null) { - newCell = coprocessorHost.postMutationBeforeWAL(RegionObserver.MutationType.APPEND, + newCell = (ServerCell) coprocessorHost.postMutationBeforeWAL(RegionObserver.MutationType.APPEND, append, oldCell, newCell); } kvs.add(newCell); @@ -6785,7 +6786,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } //Actually write to Memstore now - for (Map.Entry> entry : tempMemstore.entrySet()) { + for (Map.Entry> entry : tempMemstore.entrySet()) { Store store = entry.getKey(); if (store.getFamily().getMaxVersions() == 1) { // upsert if VERSIONS for this CF == 1 @@ -6793,8 +6794,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi memstoreCells.addAll(entry.getValue()); } else { // otherwise keep older versions around - for (Cell cell: entry.getValue()) { - Pair ret = store.add(cell); + for (ServerCell cell: entry.getValue()) { + Pair ret = store.add(cell); size += ret.getFirst(); memstoreCells.add(ret.getSecond()); doRollBackMemstore = true; @@ -6880,7 +6881,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi boolean writeToWAL = durability != Durability.SKIP_WAL; WALEdit walEdits = null; List allKVs = new ArrayList(increment.size()); - Map> tempMemstore = new HashMap>(); + Map> tempMemstore = new HashMap>(); long size = 0; long txid = 0; @@ -6919,7 +6920,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi increment.getFamilyCellMap().entrySet()) { Store store = stores.get(family.getKey()); - List kvs = new ArrayList(family.getValue().size()); + List kvs = new ArrayList(family.getValue().size()); // Sort the cells so that they match the order that they // appear in the Get results. 
Otherwise, we won't be able to @@ -6932,7 +6933,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell)); } get.setTimeRange(tr.getMin(), tr.getMax()); - List results = get(get, false); + List results = get(get, false); // Iterate the input columns and update existing values if they were // found, otherwise add new column initialized to the increment amount @@ -6953,7 +6954,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } } - Cell c = null; + ServerCell c = null; long ts = now; if (idx < results.size() && CellUtil.matchingQualifier(results.get(idx), cell)) { c = results.get(idx); @@ -6986,7 +6987,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(increment.getTTL()))); } - Cell newKV = new KeyValue(row, 0, row.length, + ServerCell newKV = new KeyValue(row, 0, row.length, family.getKey(), 0, family.getKey().length, q, 0, q.length, ts, @@ -6998,8 +6999,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // Give coprocessors a chance to update the new cell if (coprocessorHost != null) { - newKV = coprocessorHost.postMutationBeforeWAL( - RegionObserver.MutationType.INCREMENT, increment, c, newKV); + newKV = (ServerCell) coprocessorHost.postMutationBeforeWAL( + RegionObserver.MutationType.INCREMENT, increment, c, newKV);// TODO } allKVs.add(newKV); @@ -7024,7 +7025,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi //Actually write to Memstore now if (!tempMemstore.isEmpty()) { - for (Map.Entry> entry : tempMemstore.entrySet()) { + for (Map.Entry> entry : tempMemstore.entrySet()) { Store store = entry.getKey(); if (store.getFamily().getMaxVersions() == 1) { // upsert if VERSIONS for this CF == 1 @@ -7032,8 +7033,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi memstoreCells.addAll(entry.getValue()); } else { // otherwise keep older versions around - for (Cell cell : entry.getValue()) { - Pair ret = store.add(cell); + for (ServerCell cell : entry.getValue()) { + Pair ret = store.add(cell); size += ret.getFirst(); memstoreCells.add(ret.getSecond()); doRollBackMemstore = true; @@ -7265,7 +7266,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // scan.addFamily(HConstants.CATALOG_FAMILY); RegionScanner scanner = region.getScanner(scan); try { - List kvs = new ArrayList(); + List kvs = new ArrayList(); boolean done; do { kvs.clear(); @@ -7571,15 +7572,15 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi /** * A mocked list implementation - discards all updates. 
*/ - private static final List MOCKED_LIST = new AbstractList() { + private static final List MOCKED_LIST = new AbstractList() { @Override - public void add(int index, Cell element) { + public void add(int index, ServerCell element) { // do nothing } @Override - public boolean addAll(int index, Collection c) { + public boolean addAll(int index, Collection c) { return false; // this list is never changed as a result of an update } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 686df49..88b4946 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -47,7 +47,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompoundConfiguration; @@ -55,6 +54,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; @@ -662,7 +662,7 @@ public class HStore implements Store { } @Override - public Pair add(final Cell cell) { + public Pair add(final ServerCell cell) { lock.readLock().lock(); try { return this.memstore.add(cell); @@ -692,7 +692,7 @@ public class HStore implements Store { } @Override - public void rollback(final Cell cell) { + public void rollback(final ServerCell cell) { lock.readLock().lock(); try { this.memstore.rollback(cell); @@ -746,11 +746,11 @@ public class HStore implements Store { if (verifyBulkLoads) { long verificationStartTime = EnvironmentEdgeManager.currentTime(); LOG.info("Full verification started for bulk load hfile: " + srcPath.toString()); - Cell prevCell = null; + ServerCell prevCell = null; HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); do { - Cell cell = scanner.getKeyValue(); + ServerCell cell = scanner.getKeyValue(); if (prevCell != null) { if (CellComparator.compareRows(prevCell, cell) > 0) { throw new InvalidHFileException("Previous row is greater than" @@ -1757,7 +1757,7 @@ public class HStore implements Store { * @param oldestTimestamp * @return true if the cell is expired */ - static boolean isCellTTLExpired(final Cell cell, final long oldestTimestamp, final long now) { + static boolean isCellTTLExpired(final ServerCell cell, final long oldestTimestamp, final long now) { // Do not create an Iterator or Tag objects unless the cell actually has // tags if (cell.getTagsLength() > 0) { @@ -1787,7 +1787,7 @@ public class HStore implements Store { } @Override - public Cell getRowKeyAtOrBefore(final byte[] row) throws IOException { + public ServerCell getRowKeyAtOrBefore(final byte[] row) throws IOException { // If minVersions is set, we will not ignore expired KVs. // As we're only looking for the latest matches, that should be OK. // With minVersions > 0 we guarantee that any KV that has any version @@ -1812,7 +1812,7 @@ public class HStore implements Store { StoreFile sf = sfIterator.next(); sfIterator.remove(); // Remove sf from iterator. 
         boolean haveNewCandidate = rowAtOrBeforeFromStoreFile(sf, state);
-        Cell candidate = state.getCandidate();
+        ServerCell candidate = state.getCandidate();
         // we have an optimization here which stops the search if we find exact match.
         if (candidate != null && CellUtil.matchingRow(candidate, row)) {
           return candidate;
@@ -1872,7 +1872,7 @@
       // If here, need to start backing up.
       while (scanner.seekBefore(firstOnRow.getBuffer(), firstOnRow.getKeyOffset(),
          firstOnRow.getKeyLength())) {
-        Cell kv = scanner.getKeyValue();
+        ServerCell kv = scanner.getKeyValue();
         if (!state.isTargetTable(kv)) break;
         if (!state.isBetterCandidate(kv)) break;
         // Make new first on row.
@@ -1920,7 +1920,7 @@
     throws IOException {
     boolean foundCandidate = false;
     do {
-      Cell kv = scanner.getKeyValue();
+      ServerCell kv = scanner.getKeyValue();
       // If we are not in the row, skip.
       if (this.comparator.compareRows(kv, firstOnRow) < 0) continue;
       // Did we go beyond the target row? If so break.
@@ -2149,7 +2149,7 @@
   }
 
   @Override
-  public long upsert(Iterable<Cell> cells, long readpoint) throws IOException {
+  public long upsert(Iterable<ServerCell> cells, long readpoint) throws IOException {
     this.lock.readLock().lock();
     try {
       return this.memstore.upsert(cells, readpoint);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
index ea5a75f..0f6b533 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
@@ -22,7 +22,7 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
@@ -231,7 +231,7 @@ public interface InternalScanner extends Closeable {
    * one, false if scanner is done
    * @throws IOException e
    */
-  NextState next(List<Cell> results) throws IOException;
+  NextState next(List<ServerCell> results) throws IOException;
 
  /**
   * Grab the next row's worth of values with a limit on the number of values to return.
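Caller's view of the retyped interface: each next(List<ServerCell>) call fills the list with one row's worth of cells and returns a NextState describing whether more values remain. A sketch of a drain loop against these signatures (the countCells helper is illustrative, not part of the patch; it assumes NextState is the nested helper class this file defines, with the static hasMoreValues(NextState) that KeyValueHeap uses further down in this patch):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.regionserver.InternalScanner;

final class ScannerDrain {
  // Pull every row out of an InternalScanner; each next() call fills 'row'
  // with one batch of ServerCells, per the interface javadoc above.
  static long countCells(InternalScanner scanner) throws IOException {
    List<ServerCell> row = new ArrayList<ServerCell>();
    long total = 0;
    boolean more;
    do {
      row.clear();
      InternalScanner.NextState state = scanner.next(row);
      more = InternalScanner.NextState.hasMoreValues(state); // static helper seen in KeyValueHeap
      total += row.size();
    } while (more);
    return total;
  }
}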
@@ -241,7 +241,7 @@ public interface InternalScanner extends Closeable { * one, false if scanner is done * @throws IOException e */ - NextState next(List result, int limit) throws IOException; + NextState next(List result, int limit) throws IOException; /** * Grab the next row's worth of values with a limit on the number of values to return as well as a @@ -253,7 +253,7 @@ public interface InternalScanner extends Closeable { * one, false if scanner is done * @throws IOException e */ - NextState next(List result, int limit, long remainingResultSize) throws IOException; + NextState next(List result, int limit, long remainingResultSize) throws IOException; /** * Closes the scanner and releases any resources it has allocated diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index beb23cf..def2b08 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -24,8 +24,8 @@ import java.util.Comparator; import java.util.List; import java.util.PriorityQueue; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** @@ -93,19 +93,19 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner } } - public Cell peek() { + public ServerCell peek() { if (this.current == null) { return null; } return this.current.peek(); } - public Cell next() throws IOException { + public ServerCell next() throws IOException { if(this.current == null) { return null; } - Cell kvReturn = this.current.next(); - Cell kvNext = this.current.peek(); + ServerCell kvReturn = this.current.next(); + ServerCell kvNext = this.current.peek(); if (kvNext == null) { this.current.close(); this.current = pollRealKV(); @@ -132,11 +132,11 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @return state where NextState#hasMoreValues() is true if more keys exist after this * one, false if scanner is done */ - public NextState next(List result, int limit) throws IOException { + public NextState next(List result, int limit) throws IOException { return next(result, limit, -1); } - public NextState next(List result, int limit, long remainingResultSize) throws IOException { + public NextState next(List result, int limit, long remainingResultSize) throws IOException { if (this.current == null) { return NextState.makeState(NextState.State.NO_MORE_VALUES); } @@ -148,7 +148,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner throw new IOException("Invalid state returned from InternalScanner#next"); } boolean mayContainMoreRows = NextState.hasMoreValues(state); - Cell pee = this.current.peek(); + ServerCell pee = this.current.peek(); /* * By definition, any InternalScanner must return false only when it has no * further rows to be fetched. 
So, we can close a scanner if it returns @@ -179,7 +179,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @return state where NextState#hasMoreValues() is true if more keys exist after this * one, false if scanner is done */ - public NextState next(List result) throws IOException { + public NextState next(List result) throws IOException { return next(result, -1); } @@ -216,7 +216,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @param right * @return less than 0 if left is smaller, 0 if equal etc.. */ - public int compare(Cell left, Cell right) { + public int compare(ServerCell left, ServerCell right) { return this.kvComparator.compare(left, right); } /** @@ -255,7 +255,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @throws IOException */ @Override - public boolean seek(Cell seekKey) throws IOException { + public boolean seek(ServerCell seekKey) throws IOException { return generalizedSeek(false, // This is not a lazy seek seekKey, false, // forward (false: this is not a reseek) @@ -267,7 +267,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * that scanner.seek(seekKey) is changed to scanner.reseek(seekKey). */ @Override - public boolean reseek(Cell seekKey) throws IOException { + public boolean reseek(ServerCell seekKey) throws IOException { return generalizedSeek(false, // This is not a lazy seek seekKey, true, // forward (true because this is reseek) @@ -278,7 +278,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * {@inheritDoc} */ @Override - public boolean requestSeek(Cell key, boolean forward, + public boolean requestSeek(ServerCell key, boolean forward, boolean useBloom) throws IOException { return generalizedSeek(true, key, forward, useBloom); } @@ -291,7 +291,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @param forward whether to seek forward (also known as reseek) * @param useBloom whether to optimize seeks using Bloom filters */ - private boolean generalizedSeek(boolean isLazy, Cell seekKey, + private boolean generalizedSeek(boolean isLazy, ServerCell seekKey, boolean forward, boolean useBloom) throws IOException { if (!isLazy && useBloom) { throw new IllegalArgumentException("Multi-column Bloom filter " + @@ -306,7 +306,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner KeyValueScanner scanner; while ((scanner = heap.poll()) != null) { - Cell topKey = scanner.peek(); + ServerCell topKey = scanner.peek(); if (comparator.getComparator().compare(seekKey, topKey) <= 0) { // Top KeyValue is at-or-after Seek KeyValue. We only know that all // scanners are at or after seekKey (because fake keys of @@ -359,7 +359,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner while (kvScanner != null && !kvScanner.realSeekDone()) { if (kvScanner.peek() != null) { kvScanner.enforceSeek(); - Cell curKV = kvScanner.peek(); + ServerCell curKV = kvScanner.peek(); if (curKV != null) { KeyValueScanner nextEarliestScanner = heap.peek(); if (nextEarliestScanner == null) { @@ -369,7 +369,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner // Compare the current scanner to the next scanner. We try to avoid // putting the current one back into the heap if possible. - Cell nextKV = nextEarliestScanner.peek(); + ServerCell nextKV = nextEarliestScanner.peek(); if (nextKV == null || comparator.compare(curKV, nextKV) < 0) { // We already have the scanner with the earliest KV, so return it. 
return kvScanner; @@ -412,7 +412,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner } @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { // here we return the next index key from the top scanner return current == null ? null : current.getNextIndexedKey(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java index 76a9d0f..d2ed5d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.util.SortedSet; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.Scan; /** @@ -34,20 +34,20 @@ public interface KeyValueScanner { * Look at the next Cell in this scanner, but do not iterate scanner. * @return the next Cell */ - Cell peek(); + ServerCell peek(); /** * Return the next Cell in this scanner, iterating the scanner * @return the next Cell */ - Cell next() throws IOException; + ServerCell next() throws IOException; /** * Seek the scanner at or after the specified KeyValue. * @param key seek value * @return true if scanner has values left, false if end of scanner */ - boolean seek(Cell key) throws IOException; + boolean seek(ServerCell key) throws IOException; /** * Reseek the scanner at or after the specified KeyValue. @@ -57,7 +57,7 @@ public interface KeyValueScanner { * @param key seek value (should be non-null) * @return true if scanner has values left, false if end of scanner */ - boolean reseek(Cell key) throws IOException; + boolean reseek(ServerCell key) throws IOException; /** * Get the sequence id associated with this KeyValueScanner. 
This is required @@ -98,7 +98,7 @@ public interface KeyValueScanner { * @param forward do a forward-only "reseek" instead of a random-access seek * @param useBloom whether to enable multi-column Bloom filter optimization */ - boolean requestSeek(Cell kv, boolean forward, boolean useBloom) + boolean requestSeek(ServerCell kv, boolean forward, boolean useBloom) throws IOException; /** @@ -137,7 +137,7 @@ public interface KeyValueScanner { * KeyValue does not exist * */ - public boolean backwardSeek(Cell key) throws IOException; + public boolean backwardSeek(ServerCell key) throws IOException; /** * Seek the scanner at the first Cell of the row which is the previous row @@ -146,7 +146,7 @@ public interface KeyValueScanner { * @return true if the scanner at the first valid Cell of previous row, * false if not existing such Cell */ - public boolean seekToPreviousRow(Cell key) throws IOException; + public boolean seekToPreviousRow(ServerCell key) throws IOException; /** * Seek the scanner at the first KeyValue of last row @@ -161,5 +161,5 @@ public interface KeyValueScanner { * @return the next key in the index (the key to seek to the next block) * if known, or null otherwise */ - public Cell getNextIndexedKey(); + public ServerCell getNextIndexedKey(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java index 364b9c9..557c904 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.regionserver; import java.util.List; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.util.Pair; @@ -71,7 +71,7 @@ public interface MemStore extends HeapSize { * @return approximate size of the passed KV and the newly added KV which maybe different from the * passed in KV. */ - Pair add(final Cell cell); + Pair add(final ServerCell cell); /** * @return Oldest timestamp of all the Cells in the MemStore @@ -83,14 +83,14 @@ public interface MemStore extends HeapSize { * removed. It is ok to not update timeRangeTracker in this call. * @param cell */ - void rollback(final Cell cell); + void rollback(final ServerCell cell); /** * Write a delete * @param deleteCell * @return approximate size of the passed key and value. */ - long delete(final Cell deleteCell); + long delete(final ServerCell deleteCell); /** * Find the key that matches row exactly, or the one that immediately precedes it. The @@ -131,7 +131,7 @@ public interface MemStore extends HeapSize { * @param readpoint readpoint below which we can safely remove duplicate Cells. * @return change in memstore size */ - long upsert(Iterable cells, long readpoint); + long upsert(Iterable cells, long readpoint); /** * @return scanner over the memstore. 
This might include scanner over the snapshot when one is diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java index 1947a1b..9756428 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java @@ -26,6 +26,7 @@ import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Mutation; @@ -94,7 +95,7 @@ MultiRowMutationProcessorResponse> { for (List<Cell> cells : m.getFamilyCellMap().values()) { boolean writeToWAL = m.getDurability() != Durability.SKIP_WAL; for (Cell cell : cells) { - if (writeToWAL) walEdit.add(cell); + if (writeToWAL) walEdit.add((ServerCell) cell); // TODO } } } @@ -144,7 +145,7 @@ MultiRowMutationProcessorResponse> { if (walEditsFromCP[i] != null) { // Add the WALEdit created by CP hook for (Cell walCell : walEditsFromCP[i].getCells()) { - walEdit.add(walCell); + walEdit.add((ServerCell) walCell); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java index 957f417..84ee95d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.util.SortedSet; import org.apache.commons.lang.NotImplementedException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.Scan; /** @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.client.Scan; public abstract class NonLazyKeyValueScanner implements KeyValueScanner { @Override - public boolean requestSeek(Cell kv, boolean forward, boolean useBloom) + public boolean requestSeek(ServerCell kv, boolean forward, boolean useBloom) throws IOException { return doRealSeek(this, kv, forward); } @@ -51,7 +51,7 @@ public abstract class NonLazyKeyValueScanner implements KeyValueScanner { } public static boolean doRealSeek(KeyValueScanner scanner, - Cell kv, boolean forward) throws IOException { + ServerCell kv, boolean forward) throws IOException { return forward ?
scanner.reseek(kv) : scanner.seek(kv); } @@ -68,7 +68,7 @@ public abstract class NonLazyKeyValueScanner implements KeyValueScanner { return false; } @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return null; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java index c0ab1a0..cde3803 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import org.apache.commons.lang.NotImplementedException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; /** * A "non-reversed & non-lazy" scanner which does not support backward scanning @@ -34,13 +34,13 @@ public abstract class NonReversedNonLazyKeyValueScanner extends NonLazyKeyValueScanner { @Override - public boolean backwardSeek(Cell key) throws IOException { + public boolean backwardSeek(ServerCell key) throws IOException { throw new NotImplementedException("backwardSeek must not be called on a " + "non-reversed scanner"); } @Override - public boolean seekToPreviousRow(Cell key) throws IOException { + public boolean seekToPreviousRow(ServerCell key) throws IOException { throw new NotImplementedException("seekToPreviousRow must not be called on a " + "non-reversed scanner"); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index b0fd9eb..4c46102 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.UnknownScannerException; @@ -2215,7 +2216,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, if (maxResultSize <= 0) { maxResultSize = maxQuotaResultSize; } - List<Cell> values = new ArrayList<Cell>(); + List<ServerCell> values = new ArrayList<ServerCell>(); region.startRegionOperation(Operation.SCAN); try { int i = 0; @@ -2260,7 +2261,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, boolean skipResultSizeCalculation = state.hasResultSizeEstimate(); if (skipResultSizeCalculation) currentScanResultSize += state.getResultSize(); - for (Cell cell : values) { + for (ServerCell cell : values) { totalCellSize += CellUtil.estimatedSerializedSizeOf(cell); // If the calculation can't be skipped, then do it now.
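The requestSeek/doRealSeek hunks above only thread the new type through an existing dispatch: the forward flag picks between the two repositioning primitives. A minimal sketch of that contract, outside the patch; ForwardOnlyScanner is a hypothetical stand-in for KeyValueScanner, and ServerCell is the server-side Cell extension this patch introduces.

    import java.io.IOException;
    import org.apache.hadoop.hbase.ServerCell;

    final class SeekDispatchSketch {
      interface ForwardOnlyScanner {
        boolean seek(ServerCell key) throws IOException;    // random-access reposition
        boolean reseek(ServerCell key) throws IOException;  // forward-only, assumes key >= current position
      }

      // Mirrors doRealSeek: callers pass forward=true when the target is known to be
      // at or after the scanner's current cell, letting the scanner skip the full
      // block-index walk that seek() would perform.
      static boolean doRealSeek(ForwardOnlyScanner scanner, ServerCell kv, boolean forward)
          throws IOException {
        return forward ? scanner.reseek(kv) : scanner.seek(kv);
      }
    }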
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java index 441a93b..97c417b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Append; @@ -369,7 +370,7 @@ public interface Region extends ConfigurationObserver { * always invoke cp. * @return list of cells resulting from the operation */ - List<Cell> get(Get get, boolean withCoprocessor) throws IOException; + List<ServerCell> get(Get get, boolean withCoprocessor) throws IOException; /** * Return all the data for the row that matches row exactly, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index 6e23952..f275135 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -822,7 +823,7 @@ public class RegionCoprocessorHost * @return true if default processing should be bypassed * @exception IOException Exception */ - public boolean preGet(final Get get, final List<Cell> results) + public boolean preGet(final Get get, final List<ServerCell> results) throws IOException { return execOperation(coprocessors.isEmpty() ? null : new RegionOperation() { @Override @@ -838,7 +839,7 @@ public class RegionCoprocessorHost * @param results the result set * @exception IOException Exception */ - public void postGet(final Get get, final List<Cell> results) + public void postGet(final Get get, final List<ServerCell> results) throws IOException { execOperation(coprocessors.isEmpty() ?
null : new RegionOperation() { @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java index 26f9aef..7fe3bbc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.util.List; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -84,7 +84,7 @@ public interface RegionScanner extends InternalScanner { * scanner is done. * @throws IOException e */ - NextState nextRaw(List<Cell> result) throws IOException; + NextState nextRaw(List<ServerCell> result) throws IOException; /** * Grab the next row's worth of values with the default limit on the number of values to return. @@ -98,7 +98,7 @@ public interface RegionScanner extends InternalScanner { * scanner is done. * @throws IOException e */ - NextState nextRaw(List<Cell> result, int limit) throws IOException; + NextState nextRaw(List<ServerCell> result, int limit) throws IOException; /** * Grab the next row's worth of values with a limit on the number of values to return as well as a @@ -127,6 +127,6 @@ public interface RegionScanner extends InternalScanner { * scanner is done. * @throws IOException e */ - NextState nextRaw(List<Cell> result, int limit, final long remainingResultSize) + NextState nextRaw(List<ServerCell> result, int limit, final long remainingResultSize) throws IOException; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java index c7ce180..9071b30 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java @@ -23,9 +23,9 @@ import java.util.List; import org.apache.commons.lang.NotImplementedException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; /** * ReversedKeyValueHeap is used for supporting reversed scanning.
Compared with @@ -48,26 +48,26 @@ public class ReversedKeyValueHeap extends KeyValueHeap { } @Override - public boolean seek(Cell seekKey) throws IOException { + public boolean seek(ServerCell seekKey) throws IOException { throw new IllegalStateException( "seek cannot be called on ReversedKeyValueHeap"); } @Override - public boolean reseek(Cell seekKey) throws IOException { + public boolean reseek(ServerCell seekKey) throws IOException { throw new IllegalStateException( "reseek cannot be called on ReversedKeyValueHeap"); } @Override - public boolean requestSeek(Cell key, boolean forward, boolean useBloom) + public boolean requestSeek(ServerCell key, boolean forward, boolean useBloom) throws IOException { throw new IllegalStateException( "requestSeek cannot be called on ReversedKeyValueHeap"); } @Override - public boolean seekToPreviousRow(Cell seekKey) throws IOException { + public boolean seekToPreviousRow(ServerCell seekKey) throws IOException { if (current == null) { return false; } @@ -76,7 +76,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { KeyValueScanner scanner; while ((scanner = heap.poll()) != null) { - Cell topKey = scanner.peek(); + ServerCell topKey = scanner.peek(); if (comparator.getComparator().compareRows(topKey.getRowArray(), topKey.getRowOffset(), topKey.getRowLength(), seekKey.getRowArray(), seekKey.getRowOffset(), seekKey.getRowLength()) < 0) { @@ -98,7 +98,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { } @Override - public boolean backwardSeek(Cell seekKey) throws IOException { + public boolean backwardSeek(ServerCell seekKey) throws IOException { if (current == null) { return false; } @@ -107,7 +107,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { KeyValueScanner scanner; while ((scanner = heap.poll()) != null) { - Cell topKey = scanner.peek(); + ServerCell topKey = scanner.peek(); if ((CellUtil.matchingRow(seekKey, topKey) && comparator .getComparator().compare(seekKey, topKey) <= 0) || comparator.getComparator().compareRows(seekKey, topKey) > 0) { @@ -125,12 +125,12 @@ public class ReversedKeyValueHeap extends KeyValueHeap { } @Override - public Cell next() throws IOException { + public ServerCell next() throws IOException { if (this.current == null) { return null; } - Cell kvReturn = this.current.next(); - Cell kvNext = this.current.peek(); + ServerCell kvReturn = this.current.next(); + ServerCell kvNext = this.current.peek(); if (kvNext == null || this.comparator.kvComparator.compareRows(kvNext, kvReturn) > 0) { if (this.current.seekToPreviousRow(kvReturn)) { @@ -181,7 +181,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { * @param right * @return less than 0 if left is smaller, 0 if equal etc.. 
*/ - public int compareRows(Cell left, Cell right) { + public int compareRows(ServerCell left, ServerCell right) { return super.kvComparator.compareRows(left, right); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java index e319f90..8cd777b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java @@ -23,11 +23,11 @@ import java.util.List; import java.util.NavigableSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; /** @@ -70,7 +70,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { @Override protected void seekScanners(List<? extends KeyValueScanner> scanners, - Cell seekKey, boolean isLazy, boolean isParallelSeek) + ServerCell seekKey, boolean isLazy, boolean isParallelSeek) throws IOException { // Seek all scanners to the start of the Row (or if the exact matching row // key does not exist, then to the start of the previous matching Row). @@ -86,7 +86,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { } @Override - protected boolean seekToNextRow(Cell kv) throws IOException { + protected boolean seekToNextRow(ServerCell kv) throws IOException { return seekToPreviousRow(kv); } @@ -94,12 +94,12 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { * Do a backwardSeek in a reversed StoreScanner (scan backward) */ @Override - protected boolean seekAsDirection(Cell kv) throws IOException { + protected boolean seekAsDirection(ServerCell kv) throws IOException { return backwardSeek(kv); } @Override - protected void checkScanOrder(Cell prevKV, Cell kv, + protected void checkScanOrder(ServerCell prevKV, ServerCell kv, KeyValue.KVComparator comparator) throws IOException { // Check that the heap gives us KVs in an increasing order for same row and // decreasing order for different rows.
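The checkScanOrder comment above pins down the invariant a reversed scan must keep: cells within one row still ascend, while successive rows descend. A hedged sketch of that check, assuming only the KVComparator methods already used in this file:

    import org.apache.hadoop.hbase.KeyValue.KVComparator;
    import org.apache.hadoop.hbase.ServerCell;

    final class ReversedOrderSketch {
      // prev/next are consecutive cells handed out by a reversed scanner
      static void checkReversedScanOrder(ServerCell prev, ServerCell next, KVComparator comparator) {
        if (prev == null || comparator == null) {
          return; // nothing to compare against yet
        }
        boolean sameRow = comparator.compareRows(prev, next) == 0;
        // same row: full key order still ascends; row change: rows must descend
        assert sameRow ? comparator.compare(prev, next) < 0
                       : comparator.compareRows(prev, next) > 0
            : "reversed scan order violated between " + prev + " and " + next;
      }
    }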
@@ -111,19 +111,19 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { } @Override - public boolean reseek(Cell kv) throws IOException { + public boolean reseek(ServerCell kv) throws IOException { throw new IllegalStateException( "reseek cannot be called on ReversedStoreScanner"); } @Override - public boolean seek(Cell key) throws IOException { + public boolean seek(ServerCell key) throws IOException { throw new IllegalStateException( "seek cannot be called on ReversedStoreScanner"); } @Override - public boolean seekToPreviousRow(Cell key) throws IOException { + public boolean seekToPreviousRow(ServerCell key) throws IOException { lock.lock(); try { checkReseek(); @@ -135,7 +135,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { } @Override - public boolean backwardSeek(Cell key) throws IOException { + public boolean backwardSeek(ServerCell key) throws IOException { lock.lock(); try { checkReseek(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java index a5c17fb..2665a14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java @@ -23,8 +23,8 @@ import java.util.SortedSet; import java.util.TreeSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.util.Bytes; /** @@ -69,7 +69,7 @@ public class ScanDeleteTracker implements DeleteTracker { * @param cell - the delete cell */ @Override - public void add(Cell cell) { + public void add(ServerCell cell) { long timestamp = cell.getTimestamp(); int qualifierOffset = cell.getQualifierOffset(); int qualifierLength = cell.getQualifierLength(); @@ -109,7 +109,7 @@ public class ScanDeleteTracker implements DeleteTracker { * @return deleteResult */ @Override - public DeleteResult isDeleted(Cell cell) { + public DeleteResult isDeleted(ServerCell cell) { long timestamp = cell.getTimestamp(); int qualifierOffset = cell.getQualifierOffset(); int qualifierLength = cell.getQualifierLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java index 032b4ce..b6cca45 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java @@ -24,12 +24,12 @@ import java.util.NavigableSet; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter.ReturnCode; @@ -83,7 +83,7 @@ public class ScanQueryMatcher { private final ColumnTracker columns; /** Key to seek to in memstore and StoreFiles */ - private final Cell startKey; + private final 
ServerCell startKey; /** Row comparator for the region this query is for */ private final KeyValue.KVComparator rowComparator; @@ -274,7 +274,7 @@ public class ScanQueryMatcher { * @throws IOException in case there is an internal consistency problem * caused by a data corruption. */ - public MatchCode match(Cell cell) throws IOException { + public MatchCode match(ServerCell cell) throws IOException { if (filter != null && filter.filterAllRemaining()) { return MatchCode.DONE_SCAN; } @@ -493,7 +493,7 @@ public class ScanQueryMatcher { } } - public boolean moreRowsMayExistAfter(Cell kv) { + public boolean moreRowsMayExistAfter(ServerCell kv) { if (this.isReversed) { if (rowComparator.compareRows(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), stopRow, 0, stopRow.length) <= 0) { @@ -536,7 +536,7 @@ public class ScanQueryMatcher { * * @return the start key */ - public Cell getStartKey() { + public ServerCell getStartKey() { return this.startKey; } @@ -548,7 +548,7 @@ public class ScanQueryMatcher { return this.filter; } - public Cell getNextKeyHint(Cell kv) throws IOException { + public ServerCell getNextKeyHint(ServerCell kv) throws IOException { if (filter == null) { return null; } else { @@ -556,7 +556,7 @@ public class ScanQueryMatcher { } } - public Cell getKeyForNextColumn(Cell kv) { + public ServerCell getKeyForNextColumn(ServerCell kv) { ColumnCount nextColumn = columns.getColumnHint(); if (nextColumn == null) { return KeyValueUtil.createLastOnRow( @@ -571,7 +571,7 @@ public class ScanQueryMatcher { } } - public Cell getKeyForNextRow(Cell kv) { + public ServerCell getKeyForNextRow(ServerCell kv) { return KeyValueUtil.createLastOnRow( kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), null, 0, 0, @@ -583,7 +583,7 @@ public class ScanQueryMatcher { * @param kv The Cell we're using to calculate the seek key * @return result of the compare between the indexed key and the key portion of the passed cell */ - public int compareKeyForNextRow(Cell nextIndexed, Cell kv) { + public int compareKeyForNextRow(ServerCell nextIndexed, ServerCell kv) { return rowComparator.compareKey(nextIndexed, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), null, 0, 0, @@ -596,7 +596,7 @@ public class ScanQueryMatcher { * @param kv The Cell we're using to calculate the seek key * @return result of the compare between the indexed key and the key portion of the passed cell */ - public int compareKeyForNextColumn(Cell nextIndexed, Cell kv) { + public int compareKeyForNextColumn(ServerCell nextIndexed, ServerCell kv) { ColumnCount nextColumn = columns.getColumnHint(); if (nextColumn == null) { return rowComparator.compareKey(nextIndexed, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index a77fc0e..ef4c422 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -26,11 +26,11 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import 
org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver; @@ -121,14 +121,14 @@ public interface Store extends HeapSize, StoreConfigInformation, PropagatingConf * @return memstore size delta * @throws IOException */ - long upsert(Iterable<Cell> cells, long readpoint) throws IOException; + long upsert(Iterable<ServerCell> cells, long readpoint) throws IOException; /** * Adds a value to the memstore * @param cell * @return memstore size delta & newly added KV which may be different from the passed-in KV */ - Pair<Long, Cell> add(Cell cell); + Pair<Long, ServerCell> add(ServerCell cell); /** * When was the last edit done in the memstore @@ -140,7 +140,7 @@ public interface Store extends HeapSize, StoreConfigInformation, PropagatingConf * key & memstoreTS value of the cell parameter. * @param cell */ - void rollback(final Cell cell); + void rollback(final ServerCell cell); /** * Find the key that matches row exactly, or the one that immediately precedes it. WARNING: @@ -154,7 +154,7 @@ public interface Store extends HeapSize, StoreConfigInformation, PropagatingConf * @return Found Cell or null if none found. * @throws IOException */ - Cell getRowKeyAtOrBefore(final byte[] row) throws IOException; + ServerCell getRowKeyAtOrBefore(final byte[] row) throws IOException; FileSystem getFileSystem(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index c1a6b76..c1e7eff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -36,13 +36,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; @@ -709,9 +709,9 @@ public class StoreFile { private byte[] lastBloomKey; private int lastBloomKeyOffset, lastBloomKeyLen; private KVComparator kvComparator; - private Cell lastCell = null; + private ServerCell lastCell = null; private long earliestPutTs = HConstants.LATEST_TIMESTAMP; - private Cell lastDeleteFamilyCell = null; + private ServerCell lastDeleteFamilyCell = null; private long deleteFamilyCnt = 0; /** Bytes per Checksum */ @@ -823,7 +823,7 @@ public class StoreFile { * update TimeRangeTracker to include the timestamp of this key * @param cell */ - public void trackTimestamps(final Cell cell) { + public void trackTimestamps(final ServerCell cell) { if (KeyValue.Type.Put.getCode() == cell.getTypeByte()) { earliestPutTs = Math.min(earliestPutTs, cell.getTimestamp()); } @@ -832,7 +832,7 @@ public class StoreFile { } } - private void appendGeneralBloomfilter(final Cell cell) throws IOException { + private void appendGeneralBloomfilter(final ServerCell cell) throws IOException { if (this.generalBloomFilterWriter != null) { // only add to the bloom filter on a new, unique key boolean newKey
= true; @@ -903,7 +903,7 @@ public class StoreFile { } } - private void appendDeleteFamilyBloomFilter(final Cell cell) + private void appendDeleteFamilyBloomFilter(final ServerCell cell) throws IOException { if (!CellUtil.isDeleteFamily(cell) && !CellUtil.isDeleteFamilyVersion(cell)) { return; @@ -924,7 +924,7 @@ public class StoreFile { } } - public void append(final Cell cell) throws IOException { + public void append(final ServerCell cell) throws IOException { appendGeneralBloomfilter(cell); appendDeleteFamilyBloomFilter(cell); writer.append(cell); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java index 11993db..a1785bc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java @@ -24,8 +24,8 @@ import java.util.Iterator; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import com.google.common.collect.ImmutableCollection; @@ -112,7 +112,7 @@ public interface StoreFileManager { * @return The list to replace candidateFiles. */ Iterator<StoreFile> updateCandidateFilesForRowKeyBefore( - Iterator<StoreFile> candidateFiles, KeyValue targetKey, Cell candidate + Iterator<StoreFile> candidateFiles, KeyValue targetKey, ServerCell candidate ); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index a8ee091..f7e1c57 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -29,11 +29,11 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.StoreFile.Reader; @@ -49,11 +49,11 @@ public class StoreFileScanner implements KeyValueScanner { // the reader it comes from: private final StoreFile.Reader reader; private final HFileScanner hfs; - private Cell cur = null; + private ServerCell cur = null; private boolean realSeekDone; private boolean delayedReseek; - private Cell delayedSeekKV; + private ServerCell delayedSeekKV; private boolean enforceMVCC = false; private boolean hasMVCCInfo = false; @@ -126,12 +126,12 @@ public class StoreFileScanner implements KeyValueScanner { return "StoreFileScanner[" + hfs.toString() + ", cur=" + cur + "]"; } - public Cell peek() { + public ServerCell peek() { return cur; } - public Cell next() throws IOException { - Cell retKey = cur; + public ServerCell next() throws IOException { + ServerCell retKey = cur; try { // only seek if we aren't at the end. cur == null implies 'end'.
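The peek()/next() pair above follows the usual KeyValueScanner contract: peek() is side-effect free, and next() hands back that same cell before advancing. A usage sketch, assuming a scanner that has already been seeked and the ServerCell-typed signatures this patch introduces:

    import java.io.IOException;
    import org.apache.hadoop.hbase.ServerCell;
    import org.apache.hadoop.hbase.regionserver.KeyValueScanner;

    final class DrainSketch {
      static int drain(KeyValueScanner scanner) throws IOException {
        int count = 0;
        for (ServerCell ahead = scanner.peek(); ahead != null; ahead = scanner.peek()) {
          ServerCell returned = scanner.next(); // same cell peek() showed; scanner now advanced
          assert ahead == returned;
          count++;
        }
        // peek() == null (cur == null in StoreFileScanner) marks exhaustion
        return count;
      }
    }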
@@ -148,7 +148,7 @@ public class StoreFileScanner implements KeyValueScanner { return retKey; } - public boolean seek(Cell key) throws IOException { + public boolean seek(ServerCell key) throws IOException { if (seekCount != null) seekCount.incrementAndGet(); try { @@ -173,7 +173,7 @@ public class StoreFileScanner implements KeyValueScanner { } } - public boolean reseek(Cell key) throws IOException { + public boolean reseek(ServerCell key) throws IOException { if (seekCount != null) seekCount.incrementAndGet(); try { @@ -198,7 +198,7 @@ public class StoreFileScanner implements KeyValueScanner { } } - protected void setCurrentCell(Cell newVal) throws IOException { + protected void setCurrentCell(ServerCell newVal) throws IOException { this.cur = newVal; if (this.cur != null && this.reader.isBulkLoaded()) { CellUtil.setSequenceId(cur, this.reader.getSequenceID()); @@ -208,7 +208,7 @@ public class StoreFileScanner implements KeyValueScanner { protected boolean skipKVsNewerThanReadpoint() throws IOException { // We want to ignore all key-values that are newer than our current // readPoint - Cell startKV = cur; + ServerCell startKV = cur; while(enforceMVCC && cur != null && (cur.getMvccVersion() > readPt)) { @@ -242,7 +242,7 @@ public class StoreFileScanner implements KeyValueScanner { * @return false if not found or if k is after the end. * @throws IOException */ - public static boolean seekAtOrAfter(HFileScanner s, Cell k) + public static boolean seekAtOrAfter(HFileScanner s, ServerCell k) throws IOException { int result = s.seekTo(k); if(result < 0) { @@ -261,7 +261,7 @@ public class StoreFileScanner implements KeyValueScanner { return true; } - static boolean reseekAtOrAfter(HFileScanner s, Cell k) + static boolean reseekAtOrAfter(HFileScanner s, ServerCell k) throws IOException { //This function is similar to seekAtOrAfter function int result = s.reseekTo(k); @@ -303,7 +303,7 @@ public class StoreFileScanner implements KeyValueScanner { * row/column and use OLDEST_TIMESTAMP in the seek key. 
*/ @Override - public boolean requestSeek(Cell kv, boolean forward, boolean useBloom) + public boolean requestSeek(ServerCell kv, boolean forward, boolean useBloom) throws IOException { if (kv.getFamilyLength() == 0) { useBloom = false; @@ -416,7 +416,7 @@ public class StoreFileScanner implements KeyValueScanner { @Override @SuppressWarnings("deprecation") - public boolean seekToPreviousRow(Cell key) throws IOException { + public boolean seekToPreviousRow(ServerCell key) throws IOException { try { try { KeyValue seekKey = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(), @@ -474,7 +474,7 @@ public class StoreFileScanner implements KeyValueScanner { } @Override - public boolean backwardSeek(Cell key) throws IOException { + public boolean backwardSeek(ServerCell key) throws IOException { seek(key); if (cur == null || getComparator().compareRows(cur.getRowArray(), cur.getRowOffset(), @@ -486,7 +486,7 @@ public class StoreFileScanner implements KeyValueScanner { } @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return hfs.getNextIndexedKey(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java index 831673d..0e88fa4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java @@ -25,8 +25,8 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.monitoring.MonitoredTask; @@ -110,12 +110,12 @@ abstract class StoreFlusher { Compactor.CellSink sink, long smallestReadPoint) throws IOException { int compactionKVMax = conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT); - List<Cell> kvs = new ArrayList<Cell>(); + List<ServerCell> kvs = new ArrayList<ServerCell>(); boolean hasMore; do { hasMore = NextState.hasMoreValues(scanner.next(kvs, compactionKVMax)); if (!kvs.isEmpty()) { - for (Cell c : kvs) { + for (ServerCell c : kvs) { // If we know that this KV is going to be included always, then let us // set its memstoreTS to 0. This will help us save space when writing to // disk.
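The hunk above is the flush-side drain loop. A condensed sketch of the same pattern, assuming the NextState-returning next() that this patch gives InternalScanner and the sink/readpoint parameters shown in performFlush:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.ServerCell;
    import org.apache.hadoop.hbase.regionserver.InternalScanner;
    import org.apache.hadoop.hbase.regionserver.compactions.Compactor;

    final class FlushDrainSketch {
      static void drainToSink(InternalScanner scanner, Compactor.CellSink sink,
          int compactionKVMax, long smallestReadPoint) throws IOException {
        List<ServerCell> kvs = new ArrayList<ServerCell>();
        boolean hasMore;
        do {
          hasMore = InternalScanner.NextState.hasMoreValues(scanner.next(kvs, compactionKVMax));
          for (ServerCell c : kvs) {
            // a cell below every reader's readpoint is always included, so its
            // memstoreTS can be zeroed; zero sequence ids compress better on disk
            if (c.getSequenceId() <= smallestReadPoint) {
              CellUtil.setSequenceId(c, 0);
            }
            sink.append(c);
          }
          kvs.clear();
        } while (hasMore);
      }
    }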
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 298d5bc..0775a76 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -30,13 +30,13 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Scan; @@ -87,7 +87,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * KVs skipped via seeking to next row/column. TODO: estimate them? */ private long kvsScanned = 0; - private Cell prevCell = null; + private ServerCell prevCell = null; /** We don't ever expect to change this, the constant is just for clarity. */ static final boolean LAZY_SEEK_ENABLED_BY_DEFAULT = true; @@ -99,7 +99,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner LAZY_SEEK_ENABLED_BY_DEFAULT; // if heap == null and lastTop != null, you need to reseek given the key below - protected Cell lastTop = null; + protected ServerCell lastTop = null; // A flag whether use pread for scan private boolean scanUsePread = false; @@ -318,7 +318,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @throws IOException */ protected void seekScanners(List<? extends KeyValueScanner> scanners, - Cell seekKey, boolean isLazy, boolean isParallelSeek) + ServerCell seekKey, boolean isLazy, boolean isParallelSeek) throws IOException { // Seek all scanners to the start of the Row (or if the exact matching row // key does not exist, then to the start of the next matching Row).
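seekScanners, whose signature changes here, fans one seek key out to every scanner in the stack. A hedged sketch of the lazy and serial modes it dispatches between (the real method also records seek metrics and has a third, parallel mode using ParallelSeekHandler, shown in a later hunk):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.ServerCell;
    import org.apache.hadoop.hbase.regionserver.KeyValueScanner;

    final class SeekScannersSketch {
      static void seekAll(List<? extends KeyValueScanner> scanners, ServerCell seekKey,
          boolean isLazy) throws IOException {
        for (KeyValueScanner scanner : scanners) {
          if (isLazy) {
            // defer the real seek; Bloom filters may prove the key absent first
            scanner.requestSeek(seekKey, false, true);
          } else {
            scanner.seek(seekKey); // plain serial positioning
          }
        }
      }
    }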
@@ -337,7 +337,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner + ", but row is bigger than that"); } scanner.seek(seekKey); - Cell c = scanner.peek(); + ServerCell c = scanner.peek(); if (c != null) { totalScannersSoughtBytes += CellUtil.estimatedSerializedSizeOf(c); } @@ -394,7 +394,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } @Override - public Cell peek() { + public ServerCell peek() { lock.lock(); try { if (this.heap == null) { @@ -431,7 +431,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } @Override - public boolean seek(Cell key) throws IOException { + public boolean seek(ServerCell key) throws IOException { lock.lock(); try { // reset matcher state, in case that underlying store changed @@ -449,7 +449,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @return true if there are more rows, false if scanner is done */ @Override - public NextState next(List<Cell> outResult, int limit) throws IOException { + public NextState next(List<ServerCell> outResult, int limit) throws IOException { // -1 means no limit return next(outResult, limit, -1); } @@ -462,7 +462,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @return true if there are more rows, false if scanner is done */ @Override - public NextState next(List<Cell> outResult, int limit, long remainingResultSize) + public NextState next(List<ServerCell> outResult, int limit, long remainingResultSize) throws IOException { lock.lock(); try { @@ -477,7 +477,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner return NextState.makeState(NextState.State.NO_MORE_VALUES, 0); } - Cell peeked = this.heap.peek(); + ServerCell peeked = this.heap.peek(); if (peeked == null) { close(); return NextState.makeState(NextState.State.NO_MORE_VALUES, 0); @@ -499,7 +499,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner matcher.setRow(row, offset, length); } - Cell cell; + ServerCell cell; // Only do a sanity-check if store and comparator are available. KeyValue.KVComparator comparator = @@ -597,7 +597,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner case SEEK_NEXT_USING_HINT: // TODO convert reseek to Cell? - Cell nextKV = matcher.getNextKeyHint(cell); + ServerCell nextKV = matcher.getNextKeyHint(cell); if (nextKV != null) { seekAsDirection(nextKV); } else { @@ -626,8 +626,8 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * See if we should actually SEEK or rather just SKIP to the next Cell.
* (see HBASE-13109) */ - private ScanQueryMatcher.MatchCode optimize(ScanQueryMatcher.MatchCode qcode, Cell cell) { - Cell nextIndexedKey = getNextIndexedKey(); + private ScanQueryMatcher.MatchCode optimize(ScanQueryMatcher.MatchCode qcode, ServerCell cell) { + ServerCell nextIndexedKey = getNextIndexedKey(); if (nextIndexedKey == null || nextIndexedKey == HConstants.NO_NEXT_INDEXED_KEY || store == null) { return qcode; @@ -656,7 +656,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } @Override - public NextState next(List<Cell> outResult) throws IOException { + public NextState next(List<ServerCell> outResult) throws IOException { return next(outResult, -1); } @@ -710,7 +710,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner return false; } - protected void resetScannerStack(Cell lastTopKey) throws IOException { + protected void resetScannerStack(ServerCell lastTopKey) throws IOException { if (heap != null) { throw new RuntimeException("StoreScanner.reseek run on an existing heap!"); } @@ -729,7 +729,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner // Reset the state of the Query Matcher and set to top row. // Only reset and call setRow if the row changes; avoids confusing the // query matcher if scanning intra-row. - Cell kv = heap.peek(); + ServerCell kv = heap.peek(); if (kv == null) { kv = lastTopKey; } @@ -751,7 +751,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @param comparator * @throws IOException */ - protected void checkScanOrder(Cell prevKV, Cell kv, + protected void checkScanOrder(ServerCell prevKV, ServerCell kv, KeyValue.KVComparator comparator) throws IOException { // Check that the heap gives us KVs in an increasing order. assert prevKV == null || comparator == null @@ -759,7 +759,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner + " followed by a " + "smaller key " + kv + " in cf " + store; } - protected boolean seekToNextRow(Cell kv) throws IOException { + protected boolean seekToNextRow(ServerCell kv) throws IOException { return reseek(KeyValueUtil.createLastOnRow(kv)); } @@ -769,13 +769,13 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @return true if scanner has values left, false if end of scanner * @throws IOException */ - protected boolean seekAsDirection(Cell kv) + protected boolean seekAsDirection(ServerCell kv) throws IOException { return reseek(kv); } @Override - public boolean reseek(Cell kv) throws IOException { + public boolean reseek(ServerCell kv) throws IOException { lock.lock(); try { //Heap will not be null, if this is called from next() which.
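The optimize() hunk above only shows its guard. The HBASE-13109 idea it implements: when the position a SEEK would land on is still inside the current HFile block (before the next indexed key), issuing plain SKIPs is cheaper than a seek. A sketch of the shape of the remaining body, under the assumption that the compareKeyForNextColumn/Row helpers from ScanQueryMatcher shown earlier return >= 0 in exactly that case; this is not the patch's code.

    // continues optimize() after the guard shown above
    switch (qcode) {
    case SEEK_NEXT_COL:
      // the next column starts before the next indexed key, i.e. inside the
      // current block, so stepping cell by cell reaches it cheaply
      if (matcher.compareKeyForNextColumn(nextIndexedKey, cell) >= 0) {
        qcode = ScanQueryMatcher.MatchCode.SKIP;
      }
      break;
    case SEEK_NEXT_ROW:
      if (matcher.compareKeyForNextRow(nextIndexedKey, cell) >= 0) {
        qcode = ScanQueryMatcher.MatchCode.SKIP;
      }
      break;
    default:
      break;
    }
    return qcode;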
@@ -803,7 +803,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @throws IOException */ private void parallelSeek(final List<? extends KeyValueScanner> - scanners, final Cell kv) throws IOException { + scanners, final ServerCell kv) throws IOException { if (scanners.isEmpty()) return; int storeFileScannerCount = scanners.size(); CountDownLatch latch = new CountDownLatch(storeFileScannerCount); @@ -860,7 +860,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } @Override - public Cell getNextIndexedKey() { + public ServerCell getNextIndexedKey() { return this.heap.getNextIndexedKey(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java index 5b4c4db..9694b14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java @@ -26,8 +26,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.regionserver.compactions.Compactor; import org.apache.hadoop.hbase.util.Bytes; @@ -159,7 +159,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { private StoreFile.Writer currentWriter; private byte[] currentWriterEndKey; - private Cell lastCell; + private ServerCell lastCell; private long cellsInCurrentWriter = 0; private int majorRangeFromIndex = -1, majorRangeToIndex = -1; private boolean hasAnyWriter = false; @@ -193,7 +193,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { } @Override - public void append(Cell cell) throws IOException { + public void append(ServerCell cell) throws IOException { if (currentWriter == null && existingWriters.isEmpty()) { // First append ever, do a sanity check. sanityCheckLeft(this.boundaries.get(0), @@ -205,7 +205,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { ++cellsInCurrentWriter; } - private boolean isCellAfterCurrentWriter(Cell cell) { + private boolean isCellAfterCurrentWriter(ServerCell cell) { return ((currentWriterEndKey != StripeStoreFileManager.OPEN_KEY) && (comparator.compareRows(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), currentWriterEndKey, 0, currentWriterEndKey.length) >= 0)); @@ -223,7 +223,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { } } - private void prepareWriterFor(Cell cell) throws IOException { + private void prepareWriterFor(ServerCell cell) throws IOException { if (currentWriter != null && !isCellAfterCurrentWriter(cell)) return; // Use same writer.
stopUsingCurrentWriter(); @@ -295,7 +295,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { private byte[] left; private byte[] right; - private Cell lastCell; + private ServerCell lastCell; private StoreFile.Writer currentWriter; protected byte[] lastRowInCurrentWriter = null; private long cellsInCurrentWriter = 0; @@ -320,7 +320,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink { } @Override - public void append(Cell cell) throws IOException { + public void append(ServerCell cell) throws IOException { // If we are waiting for opportunity to close and we started writing different row, // discard the writer and stop waiting. boolean doCreateWriter = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java index dff6765..f78c831 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java @@ -32,10 +32,10 @@ import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy; import org.apache.hadoop.hbase.util.Bytes; @@ -176,11 +176,11 @@ public class StripeStoreFileManager } /** See {@link StoreFileManager#getCandidateFilesForRowKeyBefore(KeyValue)} and - * {@link StoreFileManager#updateCandidateFilesForRowKeyBefore(Iterator, KeyValue, Cell)} + * {@link StoreFileManager#updateCandidateFilesForRowKeyBefore(Iterator, KeyValue, ServerCell)} * for details on these methods.
*/ @Override public Iterator<StoreFile> updateCandidateFilesForRowKeyBefore( - Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final Cell candidate) { + Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final ServerCell candidate) { KeyBeforeConcatenatedLists.Iterator original = (KeyBeforeConcatenatedLists.Iterator)candidateFiles; assert original != null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java index 3c3ea6b..2e18a5a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java @@ -27,10 +27,10 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.compress.Compression; @@ -78,7 +78,7 @@ public abstract class Compactor { } public interface CellSink { - void append(Cell cell) throws IOException; + void append(ServerCell cell) throws IOException; } public CompactionProgress getProgress() { @@ -236,7 +236,7 @@ public abstract class Compactor { long bytesWrittenProgress = 0; // Since scanner.next() can return 'false' but still be delivering data, // we have to use a do/while loop. - List<Cell> cells = new ArrayList<Cell>(); + List<ServerCell> cells = new ArrayList<ServerCell>(); long closeCheckInterval = HStore.getCloseCheckInterval(); long lastMillis = 0; if (LOG.isDebugEnabled()) { @@ -254,7 +254,7 @@ public abstract class Compactor { now = EnvironmentEdgeManager.currentTime(); } // output to writer: - for (Cell c : cells) { + for (ServerCell c : cells) { if (cleanSeqId && c.getSequenceId() <= smallestReadPoint) { CellUtil.setSequenceId(c, 0); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java index b2a7649..9f53f68 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java @@ -25,6 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.executor.EventHandler; import org.apache.hadoop.hbase.executor.EventType; import org.apache.hadoop.hbase.regionserver.KeyValueScanner; @@ -36,16 +37,16 @@ public class ParallelSeekHandler extends EventHandler { private static final Log LOG = LogFactory.getLog(ParallelSeekHandler.class); private KeyValueScanner scanner; - private Cell keyValue; + private ServerCell cell; private long readPoint; private CountDownLatch latch; private Throwable err = null; - public ParallelSeekHandler(KeyValueScanner scanner,Cell keyValue, + public ParallelSeekHandler(KeyValueScanner scanner, ServerCell
cell, long readPoint, CountDownLatch latch) { super(null, EventType.RS_PARALLEL_SEEK); this.scanner = scanner; - this.keyValue = keyValue; + this.cell = cell; this.readPoint = readPoint; this.latch = latch; } @@ -53,7 +54,7 @@ public class ParallelSeekHandler extends EventHandler { @Override public void process() { try { - scanner.seek(keyValue); + scanner.seek(cell); } catch (IOException e) { LOG.error("", e); setErr(e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index a80c508..7cd0976 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -25,9 +25,11 @@ import java.io.OutputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.codec.BaseDecoder; import org.apache.hadoop.hbase.codec.BaseEncoder; import org.apache.hadoop.hbase.codec.Codec; @@ -223,6 +225,11 @@ public class WALCellCodec implements Codec { } } + @Override + public void write(ServerCell cell) throws IOException { + write((Cell) cell); + } + private void write(byte[] data, int offset, int length, Dictionary dict) throws IOException { short dictIdx = Dictionary.NOT_IN_DICTIONARY; if (dict != null) { @@ -336,7 +343,12 @@ public class WALCellCodec implements Codec { public void write(Cell cell) throws IOException { checkFlushed(); // Make sure to write tags into WAL - KeyValueUtil.oswrite(cell, this.out, true); + CellUtil.oswrite(cell, this.out, true); + } + + @Override + public void write(ServerCell cell) throws IOException { + write((Cell) cell); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java index 48a982e..33ce57a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.FilterBase; @@ -87,7 +87,7 @@ class AccessControlFilter extends FilterBase { } @Override - public ReturnCode filterKeyValue(Cell cell) { + public ReturnCode filterKeyValue(ServerCell cell) { if (isSystemTable) { return ReturnCode.INCLUDE; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index a01f8a1..c68850e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; @@ -367,12 +368,12 @@ public class AccessControlLists { iScanner = aclRegion.getScanner(scan); while (true) { - List<Cell> row = new ArrayList<Cell>(); + List<ServerCell> row = new ArrayList<ServerCell>(); boolean hasNext = NextState.hasMoreValues(iScanner.next(row)); ListMultimap<String, TablePermission> perms = ArrayListMultimap.create(); byte[] entry = null; - for (Cell kv : row) { + for (ServerCell kv : row) { if (entry == null) { entry = CellUtil.cloneRow(kv); } @@ -511,10 +512,10 @@ public class AccessControlLists { byte[] entryName, Result result) { ListMultimap<String, TablePermission> perms = ArrayListMultimap.create(); if (result != null && result.size() > 0) { - for (Cell kv : result.rawCells()) { + for (Cell cell : result.rawCells()) { Pair<String, TablePermission> permissionsOfUserOnTable = - parsePermissionRecord(entryName, kv); + parsePermissionRecord(entryName, (ServerCell) cell); if (permissionsOfUserOnTable != null) { String username = permissionsOfUserOnTable.getFirst(); @@ -527,7 +528,7 @@ public class AccessControlLists { } private static Pair<String, TablePermission> parsePermissionRecord( - byte[] entryName, Cell kv) { + byte[] entryName, ServerCell kv) { // return X given a set of permissions encoded in the permissionRecord kv. byte[] family = CellUtil.cloneFamily(kv); @@ -681,7 +682,7 @@ public class AccessControlLists { return Arrays.copyOfRange(namespace, 1, namespace.length); } - public static List<Permission> getCellPermissionsForUser(User user, Cell cell) + public static List<Permission> getCellPermissionsForUser(User user, ServerCell cell) throws IOException { // Save an object allocation where we can if (cell.getTagsLength() == 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index f5e2aa5..d657156 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -803,8 +804,8 @@ public class AccessController extends BaseMasterAndRegionObserver } } RegionScanner scanner = getRegion(e).getScanner(new Scan(get)); - List<Cell> cells = Lists.newArrayList(); - Cell prevCell = null; + List<ServerCell> cells = Lists.newArrayList(); + ServerCell prevCell = null; ByteRange curFam = new SimpleMutableByteRange(); boolean curColAllVersions = (request == OpType.DELETE); long curColCheckTs = opTs; @@ -815,7 +816,7 @@ public class AccessController extends BaseMasterAndRegionObserver cells.clear(); // scan with limit as 1 to hold down memory use on wide rows more = NextState.hasMoreValues(scanner.next(cells, 1)); - for (Cell cell: cells) { + for (ServerCell cell: cells) { if (LOG.isTraceEnabled()) { LOG.trace("Found cell " + cell); } @@ -1543,7
+1544,7 @@ public class AccessController extends BaseMasterAndRegionObserver @Override public void preGetOp(final ObserverContext c, - final Get get, final List result) throws IOException { + final Get get, final List result) throws IOException { internalPreRead(c, get, OpType.GET); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java index d043735..667b07e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.security.User; @@ -367,7 +367,7 @@ public class TableAuthManager { /** * Authorize a user for a given KV. This is called from AccessControlFilter. */ - public boolean authorize(User user, TableName table, Cell cell, Permission.Action action) { + public boolean authorize(User user, TableName table, ServerCell cell, Permission.Action action) { try { List perms = AccessControlLists.getCellPermissionsForUser(user, cell); if (LOG.isTraceEnabled()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java index 34ccb4a..04d9fff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -42,10 +42,10 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -151,13 +151,13 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService } } - protected List> getExistingLabelsWithAuths() throws IOException { + protected List> getExistingLabelsWithAuths() throws IOException { Scan scan = new Scan(); RegionScanner scanner = labelsRegion.getScanner(scan); - List> existingLabels = new ArrayList>(); + List> existingLabels = new ArrayList>(); try { while (true) { - List cells = new ArrayList(); + List cells = new ArrayList(); scanner.next(cells); if (cells.isEmpty()) { break; @@ -171,11 +171,11 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService } protected Pair, Map>> extractLabelsAndAuths( - List> labelDetails) { + List> labelDetails) { Map labels = new HashMap(); Map> userAuths = new HashMap>(); - for (List cells : labelDetails) { - for (Cell cell : cells) { + for (List cells : labelDetails) { + for (ServerCell 
cell : cells) { if (Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(), LABEL_QUALIFIER, 0, LABEL_QUALIFIER.length)) { labels.put( @@ -341,11 +341,11 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService ArrayList auths = new ArrayList(); RegionScanner scanner = this.labelsRegion.getScanner(s); try { - List results = new ArrayList(1); + List results = new ArrayList(1); while (true) { scanner.next(results); if (results.isEmpty()) break; - Cell cell = results.get(0); + ServerCell cell = results.get(0); int ordinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); String label = this.labelsCache.getLabel(ordinal); if (label != null) { @@ -378,11 +378,11 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService Set auths = new HashSet(); RegionScanner scanner = this.labelsRegion.getScanner(s); try { - List results = new ArrayList(1); + List results = new ArrayList(1); while (true) { scanner.next(results); if (results.isEmpty()) break; - Cell cell = results.get(0); + ServerCell cell = results.get(0); int ordinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); String label = this.labelsCache.getLabel(ordinal); if (label != null) { @@ -455,7 +455,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService if (isReadFromSystemAuthUser()) { return new VisibilityExpEvaluator() { @Override - public boolean evaluate(Cell cell) throws IOException { + public boolean evaluate(ServerCell cell) throws IOException { return true; } }; @@ -485,7 +485,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService return new VisibilityExpEvaluator() { @Override - public boolean evaluate(Cell cell) throws IOException { + public boolean evaluate(ServerCell cell) throws IOException { boolean visibilityTagPresent = false; // Save an object allocation where we can if (cell.getTagsLength() > 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index fbfc333..037108f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MetaTableAccessor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagRewriteCell; @@ -386,7 +387,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements } get.setFilter(new DeleteVersionVisibilityExpressionFilter(visibilityTags, VisibilityConstants.SORTED_ORDINAL_SERIALIZATION_FORMAT)); - List result = ctx.getEnvironment().getRegion().get(get, false); + List result = ctx.getEnvironment().getRegion().get(get, false); if (result.size() < get.getMaxVersions()) { // Nothing to delete @@ -397,7 +398,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements throw new RuntimeException("Unexpected size: " + result.size() + ". 
Results more than the max versions obtained."); } - Cell getCell = result.get(get.getMaxVersions() - 1); + ServerCell getCell = result.get(get.getMaxVersions() - 1); CellUtil.setTimestamp(cell, getCell.getTimestamp()); // We are bypassing here because in the HRegion.updateDeleteLatestVersionTimeStamp we would @@ -606,7 +607,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements } @Override - public void preGetOp(ObserverContext e, Get get, List results) + public void preGetOp(ObserverContext e, Get get, List results) throws IOException { if (!initialized) { throw new VisibilityControllerNotReadyException("VisibilityController not yet initialized!"); @@ -709,8 +710,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements } } - Cell rewriteCell = new TagRewriteCell(newCell, Tag.fromList(tags)); - return rewriteCell; + return new TagRewriteCell(newCell, Tag.fromList(tags)); } @Override @@ -1007,7 +1007,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements } @Override - public ReturnCode filterKeyValue(Cell cell) throws IOException { + public ReturnCode filterKeyValue(ServerCell cell) throws IOException { List putVisTags = new ArrayList(); Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(cell, putVisTags); boolean matchFound = VisibilityLabelServiceManager diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java index a720127..c28a725 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java @@ -22,7 +22,7 @@ import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; /** * During the read (ie. get/Scan) the VisibilityController calls this interface for each of the @@ -38,5 +38,5 @@ public interface VisibilityExpEvaluator { * @param cell Cell under evaluation * @return true if this cell can be included in the Result. Else false. 
*/ - boolean evaluate(Cell cell) throws IOException; + boolean evaluate(ServerCell cell) throws IOException; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java index eb8abbe..7514862 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; @@ -50,7 +50,7 @@ class VisibilityLabelFilter extends FilterBase { } @Override - public ReturnCode filterKeyValue(Cell cell) throws IOException { + public ReturnCode filterKeyValue(ServerCell cell) throws IOException { if (curFamily.getBytes() == null || (Bytes.compareTo(curFamily.getBytes(), curFamily.getOffset(), curFamily.getLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()) != 0)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java index 80e1d5d..40a8816 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java @@ -30,9 +30,9 @@ import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.regionserver.ScanDeleteTracker; import org.apache.hadoop.hbase.util.Bytes; @@ -72,7 +72,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker { } @Override - public void add(Cell delCell) { + public void add(ServerCell delCell) { //Cannot call super.add because need to find if the delete needs to be considered long timestamp = delCell.getTimestamp(); int qualifierOffset = delCell.getQualifierOffset(); @@ -115,7 +115,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker { extractDeleteCellVisTags(delCell, KeyValue.Type.codeToType(type)); } - private void extractDeleteCellVisTags(Cell delCell, Type type) { + private void extractDeleteCellVisTags(ServerCell delCell, Type type) { // If tag is present in the delete if (delCell.getTagsLength() > 0) { switch (type) { @@ -183,7 +183,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker { } @Override - public DeleteResult isDeleted(Cell cell) { + public DeleteResult isDeleted(ServerCell cell) { long timestamp = cell.getTimestamp(); int qualifierOffset = cell.getQualifierOffset(); int qualifierLength = cell.getQualifierLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java 
index d495b69..983636f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -243,7 +244,7 @@ public class VisibilityUtils {
   * @param tags - the array that will be populated if visibility tags are present
   * @return The visibility tags serialization format
   */
-  public static Byte extractVisibilityTags(Cell cell, List<Tag> tags) {
+  public static Byte extractVisibilityTags(ServerCell cell, List<Tag> tags) {
    Byte serializationFormat = null;
    if (cell.getTagsLength() > 0) {
      Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
@@ -293,7 +294,7 @@ public class VisibilityUtils {
     return serializationFormat;
   }

-  public static boolean isVisibilityTagsPresent(Cell cell) {
+  public static boolean isVisibilityTagsPresent(ServerCell cell) {
     if (cell.getTagsLength() == 0) {
       return false;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
index 3b84602..e97a714 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.SortedSet;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner;

 /**
@@ -35,27 +35,27 @@ import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner;
  */
 @InterfaceAudience.Private
 public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
-  final private Iterable<Cell> data;
+  final private Iterable<ServerCell> data;
   final KeyValue.KVComparator comparator;
-  private Iterator<Cell> iter;
-  private Cell current;
+  private Iterator<ServerCell> iter;
+  private ServerCell current;

-  public CollectionBackedScanner(SortedSet<Cell> set) {
+  public CollectionBackedScanner(SortedSet<ServerCell> set) {
     this(set, KeyValue.COMPARATOR);
   }

-  public CollectionBackedScanner(SortedSet<Cell> set,
+  public CollectionBackedScanner(SortedSet<ServerCell> set,
       KeyValue.KVComparator comparator) {
     this.comparator = comparator;
     data = set;
     init();
   }

-  public CollectionBackedScanner(List<Cell> list) {
+  public CollectionBackedScanner(List<ServerCell> list) {
     this(list, KeyValue.COMPARATOR);
   }

-  public CollectionBackedScanner(List<Cell> list,
+  public CollectionBackedScanner(List<ServerCell> list,
       KeyValue.KVComparator comparator) {
     Collections.sort(list, comparator);
     this.comparator = comparator;
@@ -64,10 +64,10 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   }

   public CollectionBackedScanner(KeyValue.KVComparator comparator,
-      Cell... array) {
+      ServerCell... array) {
     this.comparator = comparator;

-    List<Cell> tmp = new ArrayList<Cell>(array.length);
+    List<ServerCell> tmp = new ArrayList<ServerCell>(array.length);
     Collections.addAll(tmp, array);
     Collections.sort(tmp, comparator);
     data = tmp;
@@ -82,13 +82,13 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   }

   @Override
-  public Cell peek() {
+  public ServerCell peek() {
     return current;
   }

   @Override
-  public Cell next() {
-    Cell oldCurrent = current;
+  public ServerCell next() {
+    ServerCell oldCurrent = current;
     if(iter.hasNext()){
       current = iter.next();
     } else {
@@ -98,16 +98,16 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   }

   @Override
-  public boolean seek(Cell seekCell) {
+  public boolean seek(ServerCell seekCell) {
     // restart iterator
     iter = data.iterator();
     return reseek(seekCell);
   }

   @Override
-  public boolean reseek(Cell seekCell) {
+  public boolean reseek(ServerCell seekCell) {
     while(iter.hasNext()){
-      Cell next = iter.next();
+      ServerCell next = iter.next();
       int ret = comparator.compare(next, seekCell);
       if(ret >= 0){
         current = next;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index cdef12f..43da453 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -127,7 +128,7 @@ public class CompressionTest {
         .create();
     // Write any-old Cell...
final byte [] rowKey = Bytes.toBytes("compressiontestkey"); - Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval")); + ServerCell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval")); writer.append(c); writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval")); writer.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 7d1ff0d..950873f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -525,7 +525,7 @@ public abstract class HBaseTestCase extends TestCase { public interface ScannerIncommon extends Iterable { - boolean next(List values) + boolean next(List values) throws IOException; void close() throws IOException; @@ -538,14 +538,16 @@ public abstract class HBaseTestCase extends TestCase { } @Override - public boolean next(List values) + public boolean next(List values) throws IOException { Result results = scanner.next(); if (results == null) { return false; } values.clear(); - values.addAll(results.listCells()); + for (Cell cell : results.listCells()) { + values.add((ServerCell) cell); + } return true; } @@ -566,7 +568,7 @@ public abstract class HBaseTestCase extends TestCase { } @Override - public boolean next(List results) + public boolean next(List results) throws IOException { return NextState.hasMoreValues(scanner.next(results)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index ff79569..492c3f0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -3338,7 +3338,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * Do a small get/scan against one store. This is required because store * has no actual methods of querying itself, and relies on StoreScanner. */ - public static List getFromStoreFile(HStore store, + public static List getFromStoreFile(HStore store, Get get) throws IOException { Scan scan = new Scan(get); InternalScanner scanner = (InternalScanner) store.getScanner(scan, @@ -3347,7 +3347,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { // readpoint 0. 0); - List result = new ArrayList(); + List result = new ArrayList(); scanner.next(result); if (!result.isEmpty()) { // verify that we are on the row we want: @@ -3381,7 +3381,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * Do a small get/scan against one store. This is required because store * has no actual methods of querying itself, and relies on StoreScanner. 
*/ - public static List getFromStoreFile(HStore store, + public static List getFromStoreFile(HStore store, byte [] row, NavigableSet columns ) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java index ea10f60..530f25e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java @@ -84,15 +84,15 @@ public class HFilePerformanceEvaluation { * @param value Value to use * @return Created Cell. */ - static Cell createCell(final int i, final byte [] value) { + static ServerCell createCell(final int i, final byte [] value) { return createCell(format(i), value); } - static Cell createCell(final byte [] keyRow) { + static ServerCell createCell(final byte [] keyRow) { return CellUtil.createCell(keyRow); } - static Cell createCell(final byte [] keyRow, final byte [] value) { + static ServerCell createCell(final byte [] keyRow, final byte [] value) { return CellUtil.createCell(keyRow, value); } @@ -465,7 +465,7 @@ public class HFilePerformanceEvaluation { HFileScanner scanner = this.reader.getScanner(false, false); byte [] b = getRandomRow(); // System.out.println("Random row: " + new String(b)); - Cell c = createCell(b); + ServerCell c = createCell(b); if (scanner.seekTo(c) != 0) { LOG.info("Nonexistent row: " + new String(b)); return; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java index 072098e..2b53237 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java @@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.client; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -61,7 +61,7 @@ public class TestClientOperationInterrupt { public static class TestCoprocessor extends BaseRegionObserver { @Override public void preGetOp(final ObserverContext e, - final Get get, final List results) throws IOException { + final Get get, final List results) throws IOException { Threads.sleep(2500); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java index 53b36e3..bd1dc42 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java @@ -45,13 +45,13 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import 
org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.RegionLocations; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; @@ -119,7 +119,7 @@ public class TestHCM { @Override public void preGetOp(final ObserverContext e, - final Get get, final List results) throws IOException { + final Get get, final List results) throws IOException { Threads.sleep(20000); if (ct.incrementAndGet() == 1){ throw new IOException("first call I fail"); @@ -559,7 +559,7 @@ public class TestHCM { return false; } @Override - public ReturnCode filterKeyValue(Cell ignored) throws IOException { + public ReturnCode filterKeyValue(ServerCell ignored) throws IOException { return ReturnCode.INCLUDE; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java index cdfb774..c2af16d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTestConst; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.InternalScanner.NextState; @@ -92,8 +93,8 @@ public class TestIntraRowPagination { scan.setRowOffsetPerColumnFamily(storeOffset); scan.setMaxResultsPerColumnFamily(storeLimit); RegionScanner scanner = region.getScanner(scan); - List kvListScan = new ArrayList(); - List results = new ArrayList(); + List kvListScan = new ArrayList(); + List results = new ArrayList(); while (NextState.hasMoreValues(scanner.next(results)) || !results.isEmpty()) { kvListScan.addAll(results); results.clear(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index b0bd6f6..a19d846 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -24,12 +24,12 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; @@ -87,7 +87,7 @@ public class TestReplicaWithCluster { @Override public void preGetOp(final ObserverContext e, - final Get get, final List results) throws IOException { + final Get get, final List results) throws IOException { if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() == 0) { CountDownLatch latch = cdl.get(); 
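[Reviewer note, not part of the patch.] Every RegionObserver preGetOp override touched above moves from List<Cell> to List<ServerCell>. A minimal sketch of a custom observer compiled against the migrated hook, assuming the signature shown in these hunks and that ServerCell lives in org.apache.hadoop.hbase as the new imports indicate; the class name is illustrative only:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.ServerCell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

public class ExampleObserver extends BaseRegionObserver {
  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Get get, final List<ServerCell> results) throws IOException {
    // Cells added to 'results' here are what the client sees if the hook
    // bypasses the default Get; leaving the list untouched lets the Get proceed.
  }
}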
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java index efc8db2..5ca99cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java @@ -37,13 +37,13 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionLocations; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture; import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl; @@ -110,7 +110,7 @@ public class TestReplicasClient { @Override public void preGetOp(final ObserverContext e, - final Get get, final List results) throws IOException { + final Get get, final List results) throws IOException { slowdownCode(e); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java index 68053c0..772db56 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java @@ -24,10 +24,10 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.ColumnAggregationService; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest; @@ -85,12 +85,12 @@ implements Coprocessor, CoprocessorService { InternalScanner scanner = null; try { scanner = this.env.getRegion().getScanner(scan); - List curVals = new ArrayList(); + List curVals = new ArrayList(); boolean hasMore = false; do { curVals.clear(); hasMore = NextState.hasMoreValues(scanner.next(curVals)); - for (Cell kv : curVals) { + for (ServerCell kv : curVals) { if (CellUtil.matchingQualifier(kv, qualifier)) { sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java index c9a628a..0e5f012 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java @@ -23,11 +23,11 @@ import java.util.List; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest; @@ -94,12 +94,12 @@ implements Coprocessor, CoprocessorService { return; } scanner = region.getScanner(scan); - List curVals = new ArrayList(); + List curVals = new ArrayList(); boolean hasMore = false; do { curVals.clear(); hasMore = NextState.hasMoreValues(scanner.next(curVals)); - for (Cell kv : curVals) { + for (ServerCell kv : curVals) { if (CellUtil.matchingQualifier(kv, qualifier)) { sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java index 0c4d076..a205662 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest; @@ -94,7 +95,7 @@ implements Coprocessor, CoprocessorService { throw new DoNotRetryIOException("An expected exception"); } scanner = region.getScanner(scan); - List curVals = new ArrayList(); + List curVals = new ArrayList(); boolean hasMore = false; do { curVals.clear(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java index f4981f1..7eb2a52 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -329,7 +330,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { @Override public void preGetOp(final ObserverContext c, final Get get, - final List results) throws IOException { + final List results) throws IOException { RegionCoprocessorEnvironment e = c.getEnvironment(); assertNotNull(e); assertNotNull(e.getRegion()); @@ -340,7 +341,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { @Override 
public void postGetOp(final ObserverContext c, final Get get, - final List results) { + final List results) { RegionCoprocessorEnvironment e = c.getEnvironment(); assertNotNull(e); assertNotNull(e.getRegion()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java index 6deade8..e97e9c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java @@ -39,7 +39,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseTestCase; @@ -49,6 +48,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.Server; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Scan; @@ -86,34 +86,34 @@ public class TestCoprocessorInterface { } @Override - public NextState next(List results) throws IOException { + public NextState next(List results) throws IOException { return delegate.next(results); } @Override - public NextState next(List result, int limit) throws IOException { + public NextState next(List result, int limit) throws IOException { return delegate.next(result, limit); } @Override - public NextState next(List result, int limit, long remainingResultSize) + public NextState next(List result, int limit, long remainingResultSize) throws IOException { return delegate.next(result, limit, remainingResultSize); } @Override - public NextState nextRaw(List result) + public NextState nextRaw(List result) throws IOException { return delegate.nextRaw(result); } @Override - public NextState nextRaw(List result, int limit) throws IOException { + public NextState nextRaw(List result, int limit) throws IOException { return delegate.nextRaw(result, limit); } @Override - public NextState nextRaw(List result, int limit, long remainingResultSize) + public NextState nextRaw(List result, int limit, long remainingResultSize) throws IOException { return delegate.nextRaw(result, limit, remainingResultSize); } @@ -281,7 +281,7 @@ public class TestCoprocessorInterface { } @Override public void preGetOp(final ObserverContext e, - final Get get, final List results) throws IOException { + final Get get, final List results) throws IOException { if (1/0 == 1) { e.complete(); } @@ -401,7 +401,7 @@ public class TestCoprocessorInterface { RegionScanner scanner = regions[0].getCoprocessorHost().postScannerOpen(s, regions[0].getScanner(s)); assertTrue(scanner instanceof CustomScanner); // this would throw an exception before HBASE-4197 - scanner.next(new ArrayList()); + scanner.next(new ArrayList()); assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted()); assertTrue("Coprocessor not stopped", ((CoprocessorImpl)c).wasStopped()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java index a4963ae..0e909cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java @@ -34,7 +34,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -43,6 +42,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -433,19 +433,19 @@ public class TestRegionObserverInterface { Store store, final InternalScanner scanner, final ScanType scanType) { return new InternalScanner() { @Override - public NextState next(List results) throws IOException { + public NextState next(List results) throws IOException { return next(results, -1); } @Override - public NextState next(List results, int limit) throws IOException { + public NextState next(List results, int limit) throws IOException { return next(results, limit, -1); } @Override - public NextState next(List results, int limit, long remainingResultSize) + public NextState next(List results, int limit, long remainingResultSize) throws IOException { - List internalResults = new ArrayList(); + List internalResults = new ArrayList(); boolean hasMore; NextState state; do { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java index 00808bd..75cb9ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java @@ -31,7 +31,6 @@ import java.util.concurrent.CountDownLatch; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -39,6 +38,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; @@ -76,7 +76,7 @@ public class TestRegionObserverScannerOpenHook { public static class NoDataFilter extends FilterBase { @Override - public ReturnCode filterKeyValue(Cell ignored) throws IOException { + public ReturnCode filterKeyValue(ServerCell ignored) throws IOException { return ReturnCode.SKIP; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index 2136c3c..39f1caa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.IsolationLevel; @@ -334,7 +335,7 @@ public class TestRowProcessorEndpoint { public void process(long now, HRegion region, List mutations, WALEdit walEdit) throws IOException { // Scan current counter - List kvs = new ArrayList(); + List kvs = new ArrayList(); Scan scan = new Scan(row, row); scan.addColumn(FAM, COUNTER); doScan(region, scan, kvs); @@ -417,7 +418,7 @@ public class TestRowProcessorEndpoint { @Override public void process(long now, HRegion region, List mutations, WALEdit walEdit) throws IOException { - List kvs = new ArrayList(); + List kvs = new ArrayList(); { // First scan to get friends of the person Scan scan = new Scan(row, row); scan.addColumn(FAM, person); @@ -426,7 +427,7 @@ public class TestRowProcessorEndpoint { // Second scan to get friends of friends Scan scan = new Scan(row, row); - for (Cell kv : kvs) { + for (ServerCell kv : kvs) { byte[] friends = CellUtil.cloneValue(kv); for (byte f : friends) { scan.addColumn(FAM, new byte[]{f}); @@ -507,8 +508,8 @@ public class TestRowProcessorEndpoint { now = myTimer.getAndIncrement(); // Scan both rows - List kvs1 = new ArrayList(); - List kvs2 = new ArrayList(); + List kvs1 = new ArrayList(); + List kvs2 = new ArrayList(); doScan(region, new Scan(row1, row1), kvs1); doScan(region, new Scan(row2, row2), kvs2); @@ -523,7 +524,7 @@ public class TestRowProcessorEndpoint { swapped = !swapped; // Add and delete keyvalues - List> kvs = new ArrayList>(); + List> kvs = new ArrayList>(); kvs.add(kvs1); kvs.add(kvs2); byte[][] rows = new byte[][]{row1, row2}; @@ -627,7 +628,7 @@ public class TestRowProcessorEndpoint { } public static void doScan( - HRegion region, Scan scan, List result) throws IOException { + HRegion region, Scan scan, List result) throws IOException { InternalScanner scanner = null; try { scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java index a104def..b61eff8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.exceptions.DeserializationException; public class FilterAllFilter extends FilterBase { @@ -28,7 +28,7 @@ public class FilterAllFilter extends FilterBase { } @Override - public ReturnCode filterKeyValue(Cell v) throws IOException { + public ReturnCode filterKeyValue(ServerCell v) throws IOException { return ReturnCode.SKIP; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java index abd9921..c5328c7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java @@ -27,13 +27,13 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Put; @@ -66,13 +66,13 @@ public class TestColumnPrefixFilter { List columns = generateRandomWords(10000, "column"); long maxTimestamp = 2; - List kvList = new ArrayList(); + List kvList = new ArrayList(); - Map> prefixMap = new HashMap>(); + Map> prefixMap = new HashMap>(); - prefixMap.put("p", new ArrayList()); - prefixMap.put("s", new ArrayList()); + prefixMap.put("p", new ArrayList()); + prefixMap.put("s", new ArrayList()); String valueString = "ValueString"; @@ -104,7 +104,7 @@ public class TestColumnPrefixFilter { scan.setFilter(filter); InternalScanner scanner = region.getScanner(scan); - List results = new ArrayList(); + List results = new ArrayList(); while (NextState.hasMoreValues(scanner.next(results))); assertEquals(prefixMap.get(s).size(), results.size()); } @@ -128,13 +128,13 @@ public class TestColumnPrefixFilter { List columns = generateRandomWords(10000, "column"); long maxTimestamp = 2; - List kvList = new ArrayList(); + List kvList = new ArrayList(); - Map> prefixMap = new HashMap>(); + Map> prefixMap = new HashMap>(); - prefixMap.put("p", new ArrayList()); - prefixMap.put("s", new ArrayList()); + prefixMap.put("p", new ArrayList()); + prefixMap.put("s", new ArrayList()); String valueString = "ValueString"; @@ -169,7 +169,7 @@ public class TestColumnPrefixFilter { scan.setFilter(filterList); InternalScanner scanner = region.getScanner(scan); - List results = new ArrayList(); + List results = new ArrayList(); while (NextState.hasMoreValues(scanner.next(results))); assertEquals(prefixMap.get(s).size(), results.size()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index 97f0874..c63b5f0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -30,12 +30,12 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; @@ -147,7 +147,7 @@ public class TestDependentColumnFilter { private void verifyScan(Scan s, long expectedRows, long 
expectedCells) throws IOException { InternalScanner scanner = this.region.getScanner(s); - List results = new ArrayList(); + List results = new ArrayList(); int i = 0; int cells = 0; for (boolean done = true; done; i++) { @@ -231,8 +231,8 @@ public class TestDependentColumnFilter { @Test public void testFilterDropping() throws Exception { Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER); - List accepted = new ArrayList(); - for(Cell val : testVals) { + List accepted = new ArrayList(); + for(KeyValue val : testVals) { if(filter.filterKeyValue(val) == ReturnCode.INCLUDE) { accepted.add(val); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index 82ea5d4..0098a78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -270,22 +271,22 @@ public class TestFilter { // reseek to row three. scanner.reseek(ROWS_THREE[1]); - List results = new ArrayList(); + List results = new ArrayList(); // the results should belong to ROWS_THREE[1] scanner.next(results); - for (Cell keyValue : results) { + for (ServerCell keyValue : results) { assertTrue("The rows with ROWS_TWO as row key should be appearing.", CellUtil.matchingRow(keyValue, ROWS_THREE[1])); } // again try to reseek to a value before ROWS_THREE[1] scanner.reseek(ROWS_ONE[1]); - results = new ArrayList(); + results = new ArrayList(); // This time no seek would have been done to ROWS_ONE[1] scanner.next(results); - for (Cell keyValue : results) { + for (ServerCell cell : results) { assertFalse("Cannot rewind back to a value less than previous reseek.", - Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne")); + Bytes.toString(CellUtil.cloneRow(cell)).contains("testRowOne")); } } @@ -503,7 +504,7 @@ public class TestFilter { InternalScanner scanner = this.region.getScanner(s); int scannerCounter = 0; while (true) { - boolean isMoreResults = NextState.hasMoreValues(scanner.next(new ArrayList())); + boolean isMoreResults = NextState.hasMoreValues(scanner.next(new ArrayList())); scannerCounter++; if (scannerCounter >= pageSize) { @@ -531,7 +532,7 @@ public class TestFilter { InternalScanner scanner = this.region.getScanner(s); while (true) { - ArrayList values = new ArrayList(); + ArrayList values = new ArrayList(); boolean isMoreResults = NextState.hasMoreValues(scanner.next(values)); if (!isMoreResults || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) { @@ -566,7 +567,7 @@ public class TestFilter { InternalScanner scanner = this.region.getScanner(s); int scannerCounter = 0; while (true) { - boolean isMoreResults = NextState.hasMoreValues(scanner.next(new ArrayList())); + boolean isMoreResults = NextState.hasMoreValues(scanner.next(new ArrayList())); scannerCounter++; if (scannerCounter >= pageSize) { @@ -600,7 +601,7 @@ public class TestFilter { } @Override - public ReturnCode filterKeyValue(Cell ignored) throws IOException { + public ReturnCode filterKeyValue(ServerCell ignored) throws IOException { return 
ReturnCode.INCLUDE; } } @@ -620,7 +621,7 @@ public class TestFilter { s.setFilter(filter); InternalScanner scanner = this.region.getScanner(s); - ArrayList values = new ArrayList(); + ArrayList values = new ArrayList(); scanner.next(values); assertTrue("All rows should be filtered out", values.isEmpty()); } @@ -643,7 +644,7 @@ public class TestFilter { InternalScanner scanner = this.region.getScanner(s); while (true) { - ArrayList values = new ArrayList(); + ArrayList values = new ArrayList(); boolean isMoreResults = NextState.hasMoreValues(scanner.next(values)); if (!isMoreResults || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining()); @@ -672,7 +673,7 @@ public class TestFilter { InternalScanner scanner = this.region.getScanner(s); while (true) { - ArrayList values = new ArrayList(); + ArrayList values = new ArrayList(); boolean isMoreResults = NextState.hasMoreValues(scanner.next(values)); assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining()); if (!isMoreResults) { @@ -1474,7 +1475,7 @@ public class TestFilter { Scan s1 = new Scan(); s1.setFilter(filterList); InternalScanner scanner = testRegion.getScanner(s1); - List results = new ArrayList(); + List results = new ArrayList(); int resultCount = 0; while (NextState.hasMoreValues(scanner.next(results))) { resultCount++; @@ -1615,7 +1616,7 @@ public class TestFilter { private void verifyScan(Scan s, long expectedRows, long expectedKeys) throws IOException { InternalScanner scanner = this.region.getScanner(s); - List results = new ArrayList(); + List results = new ArrayList(); int i = 0; for (boolean done = true; done; i++) { done = NextState.hasMoreValues(scanner.next(results)); @@ -1637,7 +1638,7 @@ public class TestFilter { long expectedKeys) throws IOException { InternalScanner scanner = this.region.getScanner(s); - List results = new ArrayList(); + List results = new ArrayList(); int i = 0; for (boolean done = true; done; i++) { done = NextState.hasMoreValues(scanner.next(results)); @@ -1658,7 +1659,7 @@ public class TestFilter { private void verifyScanFull(Scan s, KeyValue [] kvs) throws IOException { InternalScanner scanner = this.region.getScanner(s); - List results = new ArrayList(); + List results = new ArrayList(); int row = 0; int idx = 0; for (boolean done = true; done; row++) { @@ -1670,7 +1671,7 @@ public class TestFilter { " total but already scanned " + (results.size() + idx) + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), kvs.length >= idx + results.size()); - for (Cell kv : results) { + for (ServerCell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); assertTrue("Row mismatch", CellUtil.matchingRow(kv, kvs[idx])); @@ -1689,7 +1690,7 @@ public class TestFilter { private void verifyScanFullNoValues(Scan s, KeyValue [] kvs, boolean useLen) throws IOException { InternalScanner scanner = this.region.getScanner(s); - List results = new ArrayList(); + List results = new ArrayList(); int row = 0; int idx = 0; for (boolean more = true; more; row++) { @@ -1701,7 +1702,7 @@ public class TestFilter { " total but already scanned " + (results.size() + idx) + (results.isEmpty() ? 
"" : "(" + results.get(0).toString() + ")"), kvs.length >= idx + results.size()); - for(Cell kv : results) { + for(ServerCell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); @@ -1989,7 +1990,7 @@ public class TestFilter { public byte [] toByteArray() {return null;} @Override - public ReturnCode filterKeyValue(Cell ignored) throws IOException { + public ReturnCode filterKeyValue(ServerCell ignored) throws IOException { return ReturnCode.INCLUDE; } @@ -2026,7 +2027,7 @@ public class TestFilter { Scan s1 = new Scan(); s1.setFilter(rowFilter); InternalScanner scanner = testRegion.getScanner(s1); - List results = new ArrayList(); + List results = new ArrayList(); int i = 5; for (boolean done = true; done; i++) { done = NextState.hasMoreValues(scanner.next(results)); @@ -2045,7 +2046,7 @@ public class TestFilter { s1 = new Scan(); s1.setFilter(subFilterList); scanner = testRegion.getScanner(s1); - results = new ArrayList(); + results = new ArrayList(); for (i=0; i<=4; i+=2) { scanner.next(results); assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i))); @@ -2061,7 +2062,7 @@ public class TestFilter { s1 = new Scan(); s1.setFilter(filterList); scanner = testRegion.getScanner(s1); - results = new ArrayList(); + results = new ArrayList(); for (i=0; i<=4; i+=2) { scanner.next(results); assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i))); @@ -2082,7 +2083,7 @@ public class TestFilter { s1 = new Scan(); s1.setFilter(filterList); scanner = testRegion.getScanner(s1); - results = new ArrayList(); + results = new ArrayList(); for (i=0; i<=4; i+=2) { scanner.next(results); assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 759435b..ce97e8f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -28,9 +28,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ServerCell; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter.ReturnCode; @@ -315,7 +315,7 @@ public class TestFilterList { super(); } @Override - public ReturnCode filterKeyValue(Cell v) { + public ReturnCode filterKeyValue(ServerCell v) { return ReturnCode.NEXT_COL; } public static AlwaysNextColFilter parseFrom(final byte [] pbBytes) @@ -355,7 +355,7 @@ public class TestFilterList { public void testFilterKeyValue() throws Exception { Filter includeFilter = new FilterBase() { @Override - public Filter.ReturnCode filterKeyValue(Cell v) { + public Filter.ReturnCode filterKeyValue(ServerCell v) { return Filter.ReturnCode.INCLUDE; } }; @@ -364,7 +364,7 @@ public class TestFilterList { boolean returnInclude = true; @Override - public Filter.ReturnCode filterKeyValue(Cell v) { + public Filter.ReturnCode filterKeyValue(ServerCell v) { Filter.ReturnCode returnCode = returnInclude ? 
@@ -355,7 +355,7 @@ public class TestFilterList {
   public void testFilterKeyValue() throws Exception {
     Filter includeFilter = new FilterBase() {
       @Override
-      public Filter.ReturnCode filterKeyValue(Cell v) {
+      public Filter.ReturnCode filterKeyValue(ServerCell v) {
         return Filter.ReturnCode.INCLUDE;
       }
     };
@@ -364,7 +364,7 @@ public class TestFilterList {
       boolean returnInclude = true;

       @Override
-      public Filter.ReturnCode filterKeyValue(Cell v) {
+      public Filter.ReturnCode filterKeyValue(ServerCell v) {
         Filter.ReturnCode returnCode = returnInclude ? Filter.ReturnCode.INCLUDE :
             Filter.ReturnCode.SKIP;
         returnInclude = !returnInclude;
@@ -376,7 +376,7 @@ public class TestFilterList {
       boolean returnIncludeOnly = false;

       @Override
-      public Filter.ReturnCode filterKeyValue(Cell v) {
+      public Filter.ReturnCode filterKeyValue(ServerCell v) {
         Filter.ReturnCode returnCode = returnIncludeOnly ? Filter.ReturnCode.INCLUDE :
             Filter.ReturnCode.INCLUDE_AND_NEXT_COL;
         returnIncludeOnly = !returnIncludeOnly;
@@ -418,19 +418,19 @@ public class TestFilterList {
     }

     @Override
-    public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+    public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
       return ReturnCode.INCLUDE;
     }
   };

   Filter filterMinHint = new FilterBase() {
     @Override
-    public ReturnCode filterKeyValue(Cell ignored) {
+    public ReturnCode filterKeyValue(ServerCell ignored) {
       return ReturnCode.SEEK_NEXT_USING_HINT;
     }

     @Override
-    public Cell getNextCellHint(Cell currentKV) {
+    public ServerCell getNextCellHint(ServerCell currentKV) {
       return minKeyValue;
     }
@@ -440,12 +440,12 @@ public class TestFilterList {

   Filter filterMaxHint = new FilterBase() {
     @Override
-    public ReturnCode filterKeyValue(Cell ignored) {
+    public ReturnCode filterKeyValue(ServerCell ignored) {
       return ReturnCode.SEEK_NEXT_USING_HINT;
     }

     @Override
-    public Cell getNextCellHint(Cell cell) {
+    public ServerCell getNextCellHint(ServerCell cell) {
       return new KeyValue(Bytes.toBytes(Long.MAX_VALUE), null, null);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index b88bbbf..11c7cc7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -22,12 +22,12 @@ package org.apache.hadoop.hbase.filter;
 import java.util.ArrayList;
 import java.util.List;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -137,8 +137,8 @@ public class TestInvocationRecordFilter {
     }

     Scan scan = new Scan(get);
-    List<Cell> actualValues = new ArrayList<Cell>();
-    List<Cell> temp = new ArrayList<Cell>();
+    List<ServerCell> actualValues = new ArrayList<ServerCell>();
+    List<ServerCell> temp = new ArrayList<ServerCell>();
     InternalScanner scanner = this.region.getScanner(scan);
     while (NextState.hasMoreValues(scanner.next(temp))) {
       actualValues.addAll(temp);
@@ -162,18 +162,18 @@ public class TestInvocationRecordFilter {
    */
   private static class InvocationRecordFilter extends FilterBase {

-    private List<Cell> visitedKeyValues = new ArrayList<Cell>();
+    private List<ServerCell> visitedKeyValues = new ArrayList<ServerCell>();

     public void reset() {
       visitedKeyValues.clear();
     }

-    public ReturnCode filterKeyValue(Cell ignored) {
+    public ReturnCode filterKeyValue(ServerCell ignored) {
       visitedKeyValues.add(ignored);
       return ReturnCode.INCLUDE;
     }

-    public void filterRowCells(List<Cell> kvs) {
+    public void filterRowCells(List<ServerCell> kvs) {
       kvs.clear();
       kvs.addAll(visitedKeyValues);
     }
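The test changes that follow are all instances of one idiom: InternalScanner.next fills a caller-supplied list with the next row's cells, and NextState.hasMoreValues (a helper on this branch) converts the returned state into a keep-going boolean. In isolation, and assuming a local HRegion named region as in these tests, the loop looks like:

    // Drain a region scanner row by row, buffering into a ServerCell list.
    List<ServerCell> results = new ArrayList<ServerCell>();
    InternalScanner scanner = region.getScanner(new Scan());
    try {
      boolean more;
      do {
        results.clear(); // next() appends, so clear between rows
        more = NextState.hasMoreValues(scanner.next(results));
        // assertions against the current row's cells go here
      } while (more);
    } finally {
      scanner.close();
    }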
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
index 25f2e88..31a7155 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
@@ -27,13 +27,13 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
@@ -69,14 +69,14 @@ public class TestMultipleColumnPrefixFilter {
     List<String> columns = generateRandomWords(10000, "column");
     long maxTimestamp = 2;

-    List<Cell> kvList = new ArrayList<Cell>();
+    List<ServerCell> kvList = new ArrayList<ServerCell>();

-    Map<String, List<Cell>> prefixMap = new HashMap<String, List<Cell>>();
+    Map<String, List<ServerCell>> prefixMap = new HashMap<String, List<ServerCell>>();

-    prefixMap.put("p", new ArrayList<Cell>());
-    prefixMap.put("q", new ArrayList<Cell>());
-    prefixMap.put("s", new ArrayList<Cell>());
+    prefixMap.put("p", new ArrayList<ServerCell>());
+    prefixMap.put("q", new ArrayList<ServerCell>());
+    prefixMap.put("s", new ArrayList<ServerCell>());

     String valueString = "ValueString";
@@ -108,7 +108,7 @@ public class TestMultipleColumnPrefixFilter {
     filter = new MultipleColumnPrefixFilter(filter_prefix);
     scan.setFilter(filter);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);
     while (NextState.hasMoreValues(scanner.next(results)))
       ;
@@ -136,14 +136,14 @@ public class TestMultipleColumnPrefixFilter {
     List<String> columns = generateRandomWords(10000, "column");
     long maxTimestamp = 3;

-    List<Cell> kvList = new ArrayList<Cell>();
+    List<ServerCell> kvList = new ArrayList<ServerCell>();

-    Map<String, List<Cell>> prefixMap = new HashMap<String, List<Cell>>();
+    Map<String, List<ServerCell>> prefixMap = new HashMap<String, List<ServerCell>>();

-    prefixMap.put("p", new ArrayList<Cell>());
-    prefixMap.put("q", new ArrayList<Cell>());
-    prefixMap.put("s", new ArrayList<Cell>());
+    prefixMap.put("p", new ArrayList<ServerCell>());
+    prefixMap.put("q", new ArrayList<ServerCell>());
+    prefixMap.put("s", new ArrayList<ServerCell>());

     String valueString = "ValueString";
@@ -153,7 +153,7 @@ public class TestMultipleColumnPrefixFilter {
     for (String column: columns) {
       for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
         double rand = Math.random();
-        Cell kv;
+        ServerCell kv;
         if (rand < 0.5)
           kv = KeyValueTestUtil.create(row, family1, column, timestamp, valueString);
@@ -181,7 +181,7 @@ public class TestMultipleColumnPrefixFilter {
     filter = new MultipleColumnPrefixFilter(filter_prefix);
     scan.setFilter(filter);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);
     while (NextState.hasMoreValues(scanner.next(results)))
       ;
@@ -226,7 +226,7 @@ public class TestMultipleColumnPrefixFilter {
     multiplePrefixFilter = new MultipleColumnPrefixFilter(filter_prefix);
     scan1.setFilter(multiplePrefixFilter);
-    List<Cell> results1 = new ArrayList<Cell>();
+    List<ServerCell> results1 = new ArrayList<ServerCell>();
     InternalScanner scanner1 = region.getScanner(scan1);
     while (NextState.hasMoreValues(scanner1.next(results1)))
       ;
@@ -237,7 +237,7 @@ public class TestMultipleColumnPrefixFilter {
     singlePrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("p"));
     scan2.setFilter(singlePrefixFilter);
-    List<Cell> results2 = new ArrayList<Cell>();
+    List<ServerCell> results2 = new ArrayList<ServerCell>();
     InternalScanner scanner2 = region.getScanner(scan1);
     while (NextState.hasMoreValues(scanner2.next(results2)))
       ;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
index 7aa298c..ee8a987 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase.filter;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.testclassification.FilterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -57,7 +57,7 @@ public class TestSingleColumnValueExcludeFilter {
         CompareOp.EQUAL, VAL_1);

     // A 'match' situation
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1);

     kvs.add (new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
index 1eda567..3da567b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -115,7 +116,7 @@ public class TestPrefixTree {
     scan.setStopRow(Bytes.toBytes("a-b-A-1:"));

     RegionScanner scanner = region.getScanner(scan);
-    List<Cell> cells = new ArrayList<Cell>();
+    List<ServerCell> cells = new ArrayList<ServerCell>();
     for (int i = 0; i < 3; i++) {
       assertEquals(i < 2, NextState.hasMoreValues(scanner.next(cells)));
       CellScanner cellScanner = Result.create(cells).cellScanner();
@@ -185,7 +186,7 @@ public class TestPrefixTree {
     region.flush(true);
     Scan scan = new Scan(Bytes.toBytes("obj29995"));
     RegionScanner scanner = region.getScanner(scan);
-    List<Cell> cells = new ArrayList<Cell>();
+    List<ServerCell> cells = new ArrayList<ServerCell>();
     assertFalse(NextState.hasMoreValues(scanner.next(cells)));
     assertArrayEquals(Bytes.toBytes("obj3"), Result.create(cells).getRow());
   }
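TestPrefixTree exercises the retyped lists through the client Result class: the scanned cells are wrapped with Result.create and walked with a CellScanner. A minimal sketch of that round trip (assuming Result.create accepts the ServerCell list, which the client-side signature changes in this patch appear to allow):

    // Print the row key of each cell in the next row the scanner returns.
    static void printRow(RegionScanner scanner) throws IOException {
      List<ServerCell> cells = new ArrayList<ServerCell>();
      scanner.next(cells); // one row's cells
      CellScanner cellScanner = Result.create(cells).cellScanner();
      while (cellScanner.advance()) {
        System.out.println(Bytes.toString(CellUtil.cloneRow(cellScanner.current())));
      }
    }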
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
index ee664bd..1ea87a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
@@ -35,9 +35,9 @@ import java.util.concurrent.ConcurrentSkipListSet;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -69,7 +69,7 @@ public class TestPrefixTreeEncoding {
   private static final int NUM_COLS_PER_ROW = 20;

   private int numBatchesWritten = 0;
-  private ConcurrentSkipListSet<Cell> kvset = new ConcurrentSkipListSet<Cell>(
+  private ConcurrentSkipListSet<ServerCell> kvset = new ConcurrentSkipListSet<ServerCell>(
       KeyValue.COMPARATOR);

   private static boolean formatRowNum = false;
@@ -163,9 +163,9 @@ public class TestPrefixTreeEncoding {
     ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
         onDiskBytes.length - DataBlockEncoding.ID_SIZE);
     seeker.setCurrentBuffer(readBuffer);
-    Cell previousKV = null;
+    ServerCell previousKV = null;
     do {
-      Cell currentKV = seeker.getKeyValue();
+      ServerCell currentKV = seeker.getKeyValue();
       System.out.println(currentKV);
       if (previousKV != null && KeyValue.COMPARATOR.compare(currentKV, previousKV) < 0) {
         dumpInputKVSet();
@@ -257,18 +257,18 @@ public class TestPrefixTreeEncoding {

   private void dumpInputKVSet() {
     LOG.info("Dumping input keyvalue set in error case:");
-    for (Cell kv : kvset) {
+    for (ServerCell kv : kvset) {
       System.out.println(kv);
     }
   }

-  private static void generateFixedTestData(ConcurrentSkipListSet<Cell> kvset, int batchId,
+  private static void generateFixedTestData(ConcurrentSkipListSet<ServerCell> kvset, int batchId,
       boolean useTags, PrefixTreeCodec encoder, HFileBlockEncodingContext blkEncodingCtx,
       DataOutputStream userDataStream) throws Exception {
     generateFixedTestData(kvset, batchId, true, useTags, encoder, blkEncodingCtx, userDataStream);
   }

-  private static void generateFixedTestData(ConcurrentSkipListSet<Cell> kvset,
+  private static void generateFixedTestData(ConcurrentSkipListSet<ServerCell> kvset,
       int batchId, boolean partial, boolean useTags, PrefixTreeCodec encoder,
       HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception {
     for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
@@ -287,13 +287,13 @@ public class TestPrefixTreeEncoding {
       }
     }
     encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
-    for (Cell kv : kvset) {
+    for (ServerCell kv : kvset) {
       encoder.encode(kv, blkEncodingCtx, userDataStream);
     }
     encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
   }

-  private static void generateRandomTestData(ConcurrentSkipListSet<Cell> kvset,
+  private static void generateRandomTestData(ConcurrentSkipListSet<ServerCell> cells,
       int batchId, boolean useTags, PrefixTreeCodec encoder,
       HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception {
     Random random = new Random();
@@ -306,16 +306,16 @@ public class TestPrefixTreeEncoding {
       if (!useTags) {
         KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), getValue(
             batchId, i, j));
-        kvset.add(kv);
+        cells.add(kv);
       } else {
         KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0l,
             getValue(batchId, i, j), new Tag[] { new Tag((byte) 1, "metaValue1") });
-        kvset.add(kv);
+        cells.add(kv);
       }
     }
     encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
-    for (Cell kv : kvset) {
+    for (ServerCell kv : cells) {
       encoder.encode(kv, blkEncodingCtx, userDataStream);
     }
     encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
   }
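The heart of TestPrefixTreeEncoding's seek check is an ordering assertion: decoded cells must come back in KeyValue.COMPARATOR order. Factored out, the check is just this (a sketch; it assumes ServerCell instances are comparable with that comparator, which the retyped ConcurrentSkipListSet above already requires):

    // Fail if any cell in the iterable sorts before its predecessor.
    static void assertSorted(Iterable<ServerCell> cells) {
      ServerCell previous = null;
      for (ServerCell current : cells) {
        if (previous != null && KeyValue.COMPARATOR.compare(current, previous) < 0) {
          throw new AssertionError("cell " + current + " sorts before " + previous);
        }
        previous = current;
      }
    }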
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
index fc44f3c..a870868 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
@@ -36,11 +36,11 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -474,7 +474,7 @@ public class TestHFileBlockCompatibility {
     }

     @Override
-    public void write(Cell c) throws IOException {
+    public void write(ServerCell c) throws IOException {
       KeyValue kv = KeyValueUtil.ensureKeyValue(c);
       expectState(State.WRITING);
       this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index 6baadbb..61812d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -27,11 +27,11 @@ import java.util.Map;
 import java.util.Set;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
@@ -121,7 +121,7 @@ public class TestScannerSelectionUsingKeyRange {
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     while (NextState.hasMoreValues(scanner.next(results))) {
     }
     scanner.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 4e0743d..ac47627 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -27,11 +27,11 @@ import java.util.Set;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
@@ -136,7 +136,7 @@ public class TestScannerSelectionUsingTTL {
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;
     int numReturnedRows = 0;
     LOG.info("Scanning the entire table");
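The EncodedSeekPerformanceTest changes below retype its seek workload. The benchmark core is small enough to state directly (a sketch, assuming a StoreFileScanner named scanner and a prepared List<ServerCell> named seeks as in the file): seek to each sampled cell, read the cell actually found, and charge the elapsed time to the batch.

    long start = System.nanoTime();
    for (ServerCell target : seeks) {
      scanner.seek(target);                 // position at or after 'target'
      ServerCell found = scanner.next();    // cell actually under the cursor
      if (!target.equals(found)) {
        System.out.println("seek did not land on the requested cell");
      }
    }
    double nanosPerSeek = (System.nanoTime() - start) / (double) seeks.size();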
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
index 6c801bf..3054cd5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
@@ -23,11 +23,11 @@ import java.util.Random;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
@@ -55,8 +55,8 @@ public class EncodedSeekPerformanceTest {
     numberOfSeeks = DEFAULT_NUMBER_OF_SEEKS;
   }

-  private List<Cell> prepareListOfTestSeeks(Path path) throws IOException {
-    List<Cell> allKeyValues = new ArrayList<Cell>();
+  private List<ServerCell> prepareListOfTestSeeks(Path path) throws IOException {
+    List<ServerCell> allKeyValues = new ArrayList<ServerCell>();

     // read all of the key values
     StoreFile storeFile = new StoreFile(testingUtility.getTestFileSystem(),
@@ -64,7 +64,7 @@ public class EncodedSeekPerformanceTest {
     StoreFile.Reader reader = storeFile.createReader();
     StoreFileScanner scanner = reader.getStoreFileScanner(true, false);
-    Cell current;
+    ServerCell current;

     scanner.seek(KeyValue.LOWESTKEY);
     while (null != (current = scanner.next())) {
@@ -74,9 +74,9 @@ public class EncodedSeekPerformanceTest {
     storeFile.closeReader(cacheConf.shouldEvictOnClose());

     // pick seeks by random
-    List<Cell> seeks = new ArrayList<Cell>();
+    List<ServerCell> seeks = new ArrayList<ServerCell>();
     for (int i = 0; i < numberOfSeeks; ++i) {
-      Cell keyValue = allKeyValues.get(
+      ServerCell keyValue = allKeyValues.get(
           randomizer.nextInt(allKeyValues.size()));
       seeks.add(keyValue);
     }
@@ -87,7 +87,7 @@ public class EncodedSeekPerformanceTest {
   }

   private void runTest(Path path, DataBlockEncoding blockEncoding,
-      List<Cell> seeks) throws IOException {
+      List<ServerCell> seeks) throws IOException {
     // read all of the key values
     StoreFile storeFile = new StoreFile(testingUtility.getTestFileSystem(),
         path, configuration, cacheConf, BloomType.NONE);
@@ -98,7 +98,7 @@ public class EncodedSeekPerformanceTest {
     StoreFileScanner scanner = reader.getStoreFileScanner(true, false);
     long startReadingTime = System.nanoTime();
-    Cell current;
+    ServerCell current;
     scanner.seek(KeyValue.LOWESTKEY);
     while (null != (current = scanner.next())) { // just iterate it!
       if (KeyValueUtil.ensureKeyValue(current).getLength() < 0) {
@@ -110,9 +110,9 @@ public class EncodedSeekPerformanceTest {

     // do seeks
     long startSeeksTime = System.nanoTime();
-    for (Cell keyValue : seeks) {
+    for (ServerCell keyValue : seeks) {
       scanner.seek(keyValue);
-      Cell toVerify = scanner.next();
+      ServerCell toVerify = scanner.next();
       if (!keyValue.equals(toVerify)) {
         System.out.println(String.format("KeyValue doesn't match:\n" + "Orig key: %s\n"
             + "Ret key: %s", KeyValueUtil.ensureKeyValue(keyValue).getKeyString(), KeyValueUtil
@@ -148,7 +148,7 @@ public class EncodedSeekPerformanceTest {
    */
   public void runTests(Path path, DataBlockEncoding[] encodings)
       throws IOException {
-    List<Cell> seeks = prepareListOfTestSeeks(path);
+    List<ServerCell> seeks = prepareListOfTestSeeks(path);

     for (DataBlockEncoding blockEncoding : encodings) {
       runTest(path, blockEncoding, seeks);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 478e239..447f3f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MultithreadedTestUtil;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -466,7 +467,7 @@ public class TestAtomicOperation {
           // check: should always see exactly one column
           Scan s = new Scan(row);
           RegionScanner rs = region.getScanner(s);
-          List<Cell> r = new ArrayList<Cell>();
+          List<ServerCell> r = new ArrayList<ServerCell>();
           while(NextState.hasMoreValues(rs.next(r)));
           rs.close();
           if (r.size() != 1) {
@@ -560,7 +561,7 @@ public class TestAtomicOperation {
     ctx.stop();
     Scan s = new Scan();
     RegionScanner scanner = region.getScanner(s);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     scanner.next(results, 2);
     for (Cell keyValue : results) {
       assertEquals("50",Bytes.toString(CellUtil.cloneValue(keyValue)));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index 4a3d352..4e9c3d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableName;
@@ -402,7 +403,7 @@ public class TestBlocksRead extends HBaseTestCase {
     Scan scan = new Scan();
     scan.setCacheBlocks(false);
     RegionScanner rs = region.getScanner(scan);
-    List<Cell> result = new ArrayList<Cell>(2);
+    List<ServerCell> result = new ArrayList<ServerCell>(2);
     rs.next(result);
     assertEquals(2 * BLOOM_TYPE.length, result.size());
     rs.close();
@@ -415,7 +416,7 @@ public class TestBlocksRead extends HBaseTestCase {
     blocksStart = blocksEnd;
     scan.setCacheBlocks(true);
     rs = region.getScanner(scan);
-    result = new ArrayList<Cell>(2);
+    result = new ArrayList<ServerCell>(2);
     rs.next(result);
     assertEquals(2 * BLOOM_TYPE.length, result.size());
     rs.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index 446c64c..68a2004 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.regionserver;
 import java.util.ArrayList;
 import java.util.List;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.regionserver.InternalScanner.NextState;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -102,7 +102,7 @@ public class TestBlocksScanned extends HBaseTestCase {
     scan.setMaxVersions(1);

     InternalScanner s = r.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     while (NextState.hasMoreValues(s.next(results)));
     s.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
index 9b9db5a..2e03734 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
@@ -23,8 +23,8 @@ import java.util.SortedSet;

 import junit.framework.TestCase;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -46,7 +46,7 @@ public class TestCellSkipListSet extends TestCase {
     this.csls.add(kv);
     assertTrue(this.csls.contains(kv));
     assertEquals(1, this.csls.size());
-    Cell first = this.csls.first();
+    ServerCell first = this.csls.first();
     assertTrue(kv.equals(first));
     assertTrue(Bytes.equals(kv.getValue(), first.getValue()));
     // Now try overwritting
@@ -69,7 +69,7 @@ public class TestCellSkipListSet extends TestCase {
     }
     // Assert that we added 'total' values and that they are in order
     int count = 0;
-    for (Cell kv: this.csls) {
+    for (ServerCell kv: this.csls) {
       assertEquals("" + count, Bytes.toString(kv.getQualifier()));
       assertTrue(Bytes.equals(kv.getValue(), value1));
       count++;
@@ -82,7 +82,7 @@ public class TestCellSkipListSet extends TestCase {
     // Assert that we added 'total' values and that they are in order and that
     // we are getting back value2
     count = 0;
-    for (Cell kv: this.csls) {
+    for (ServerCell kv: this.csls) {
       assertEquals("" + count, Bytes.toString(kv.getQualifier()));
       assertTrue(Bytes.equals(kv.getValue(), value2));
       count++;
@@ -100,8 +100,8 @@ public class TestCellSkipListSet extends TestCase {
     }
     // Assert that we added 'total' values and that they are in order
     int count = 0;
-    for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
-      Cell kv = i.next();
+    for (Iterator<ServerCell> i = this.csls.descendingIterator(); i.hasNext();) {
+      ServerCell kv = i.next();
       assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
       assertTrue(Bytes.equals(kv.getValue(), value1));
       count++;
@@ -114,8 +114,8 @@ public class TestCellSkipListSet extends TestCase {
     // Assert that we added 'total' values and that they are in order and that
     // we are getting back value2
     count = 0;
-    for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
-      Cell kv = i.next();
+    for (Iterator<ServerCell> i = this.csls.descendingIterator(); i.hasNext();) {
+      ServerCell kv = i.next();
       assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
       assertTrue(Bytes.equals(kv.getValue(), value2));
       count++;
@@ -134,9 +134,9 @@ public class TestCellSkipListSet extends TestCase {
       if (i == 1) splitter = kv;
       this.csls.add(kv);
     }
-    SortedSet<Cell> tail = this.csls.tailSet(splitter);
+    SortedSet<ServerCell> tail = this.csls.tailSet(splitter);
     assertEquals(2, tail.size());
-    SortedSet<Cell> head = this.csls.headSet(splitter);
+    SortedSet<ServerCell> head = this.csls.headSet(splitter);
     assertEquals(1, head.size());
     // Now ensure that we get back right answer even when we do tail or head.
     // Now overwrite with a new value.
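The head/tail assertions above only use the standard SortedSet contract: headSet(splitter) is strictly less than the splitter and excludes it, tailSet(splitter) is greater-or-equal and includes it. The same partitioning on a plain skip list (a sketch; it relies on KeyValue implementing ServerCell, which the adds into the retyped sets elsewhere in this patch imply):

    NavigableSet<ServerCell> set =
        new ConcurrentSkipListSet<ServerCell>(KeyValue.COMPARATOR);
    set.add(KeyValueTestUtil.create("r", "f", "0", 1L, "v"));
    ServerCell splitter = KeyValueTestUtil.create("r", "f", "1", 1L, "v");
    set.add(splitter);
    set.add(KeyValueTestUtil.create("r", "f", "2", 1L, "v"));
    assert set.headSet(splitter).size() == 1; // exclusive of the splitter
    assert set.tailSet(splitter).size() == 2; // inclusive of the splitter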
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index c09b32d..48dd148 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -32,13 +32,13 @@ import java.util.Set;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
@@ -160,7 +160,7 @@ public class TestColumnSeeking {
       }

       InternalScanner scanner = region.getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
       while (NextState.hasMoreValues(scanner.next(results)))
         ;
       assertEquals(kvSet.size(), results.size());
@@ -272,7 +272,7 @@ public class TestColumnSeeking {
       }

       InternalScanner scanner = region.getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
       while (NextState.hasMoreValues(scanner.next(results)))
         ;
       assertEquals(kvSet.size(), results.size());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index e1e5b89..57f095e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
@@ -99,7 +100,7 @@ public class TestDefaultMemStore extends TestCase {
     int rowCount = addRows(this.memstore);
     List<KeyValueScanner> memstorescanners = this.memstore.getScanners(0);
     Scan scan = new Scan();
-    List<Cell> result = new ArrayList<Cell>();
+    List<ServerCell> result = new ArrayList<ServerCell>();
     ScanInfo scanInfo = new ScanInfo(null, 0, 1, HConstants.LATEST_TIMESTAMP,
         KeepDeletedCells.FALSE, 0, this.memstore.comparator);
@@ -527,7 +528,7 @@ public class TestDefaultMemStore extends TestCase {
     InternalScanner scanner = new StoreScanner(new Scan(
         Bytes.toBytes(startRowId)), scanInfo, scanType, null,
         memstore.getScanners(0));
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     for (int i = 0; NextState.hasMoreValues(scanner.next(results)); i++) {
       int rowId = startRowId + i;
       Cell left = results.get(0);
@@ -831,7 +832,7 @@ public class TestDefaultMemStore extends TestCase {
     memstore = new DefaultMemStore(conf, KeyValue.COMPARATOR);
     long oldSize = memstore.size.get();

-    List<Cell> l = new ArrayList<Cell>();
+    List<ServerCell> l = new ArrayList<ServerCell>();
     KeyValue kv1 = KeyValueTestUtil.create("r", "f", "q", 100, "v");
     KeyValue kv2 = KeyValueTestUtil.create("r", "f", "q", 101, "v");
     KeyValue kv3 = KeyValueTestUtil.create("r", "f", "q", 102, "v");
@@ -888,7 +889,7 @@ public class TestDefaultMemStore extends TestCase {
     t = runSnapshot(memstore);

     // test the case that the timeOfOldestEdit is updated after a KV upsert
-    List<Cell> l = new ArrayList<Cell>();
+    List<ServerCell> l = new ArrayList<ServerCell>();
     KeyValue kv1 = KeyValueTestUtil.create("r", "f", "q", 100, "v");
     kv1.setSequenceId(100);
     l.add(kv1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index 416ee28..0a656d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -35,6 +34,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
@@ -96,7 +96,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
     }
     InternalScanner s = mr.getScanner(new Scan());
     try {
-      List<Cell> keys = new ArrayList<Cell>();
+      List<ServerCell> keys = new ArrayList<ServerCell>();
       while (NextState.hasMoreValues(s.next(keys))) {
         LOG.info(keys);
         keys.clear();
@@ -120,7 +120,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
     Scan scan = new Scan(firstRowInC);
     s = mr.getScanner(scan);
     try {
-      List<Cell> keys = new ArrayList<Cell>();
+      List<ServerCell> keys = new ArrayList<ServerCell>();
       while (NextState.hasMoreValues(s.next(keys))) {
         mr.delete(new Delete(CellUtil.cloneRow(keys.get(0))));
         keys.clear();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index cb8d0be..af7892c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.RegionTooBusyException;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
@@ -489,7 +490,7 @@ public class TestHRegion {
     // open the second scanner
     RegionScanner scanner2 = region.getScanner(scan);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();

     System.out.println("Smallest read point:" + region.getSmallestReadPoint());
@@ -538,7 +539,7 @@ public class TestHRegion {
     region.compact(true);

     scanner1.reseek(Bytes.toBytes("r2"));
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     scanner1.next(results);
     Cell keyValue = results.get(0);
     Assert.assertTrue(Bytes.compareTo(CellUtil.cloneRow(keyValue), Bytes.toBytes("r2")) == 0);
@@ -1263,7 +1264,7 @@ public class TestHRegion {
     InternalScanner scanner = buildScanner(keyPrefix, value, r);
     int count = 0;
     boolean more = false;
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     do {
       more = NextState.hasMoreValues(scanner.next(results));
       if (results != null && !results.isEmpty())
@@ -1281,7 +1282,7 @@ public class TestHRegion {
   private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception {
     InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
     int numberOfResults = 0;
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     boolean more = false;
     do {
       more = NextState.hasMoreValues(resultScanner.next(results));
@@ -2216,7 +2217,7 @@ public class TestHRegion {
     Scan scan = new Scan();
     scan.addFamily(fam1).addFamily(fam2);
     InternalScanner s = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     s.next(results);
     assertTrue(CellUtil.matchingRow(results.get(0), rowA));
@@ -2273,7 +2274,7 @@ public class TestHRegion {
     scan.addColumn(fam1, qual1);

     InternalScanner s = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     assertEquals(false, NextState.hasMoreValues(s.next(results)));
     assertEquals(1, results.size());
     Cell kv = results.get(0);
@@ -2395,7 +2396,7 @@ public class TestHRegion {
     assertEquals(expected.length, res.size());
     for (int i = 0; i < res.size(); i++) {
       assertTrue(CellUtil.matchingRow(expected[i], res.rawCells()[i]));
-      assertTrue(CellUtil.matchingFamily(expected[i], res.rawCells()[i]));
+      assertTrue(CellUtil.matchingFamily(expected[i], (ServerCell) res.rawCells()[i]));
       assertTrue(CellUtil.matchingQualifier(expected[i], res.rawCells()[i]));
     }
@@ -2691,14 +2692,14 @@ public class TestHRegion {
     scan.addFamily(fam4);
     InternalScanner is = region.getScanner(scan);

-    List<Cell> res = null;
+    List<ServerCell> res = null;

     // Result 1
     List<Cell> expected1 = new ArrayList<Cell>();
     expected1.add(new KeyValue(row1, fam2, null, ts, KeyValue.Type.Put, null));
     expected1.add(new KeyValue(row1, fam4, null, ts, KeyValue.Type.Put, null));

-    res = new ArrayList<Cell>();
+    res = new ArrayList<ServerCell>();
     is.next(res);
     for (int i = 0; i < res.size(); i++) {
       assertTrue(CellComparator.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
@@ -2709,7 +2710,7 @@ public class TestHRegion {
     expected2.add(new KeyValue(row2, fam2, null, ts, KeyValue.Type.Put, null));
     expected2.add(new KeyValue(row2, fam4, null, ts, KeyValue.Type.Put, null));

-    res = new ArrayList<Cell>();
+    res = new ArrayList<ServerCell>();
     is.next(res);
     for (int i = 0; i < res.size(); i++) {
       assertTrue(CellComparator.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
@@ -2763,7 +2764,7 @@ public class TestHRegion {
     Scan scan = new Scan(row1);
     scan.addColumn(fam1, qf1);
     scan.setMaxVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -2826,7 +2827,7 @@ public class TestHRegion {
     scan.addColumn(fam1, qf1);
     scan.addColumn(fam1, qf2);
     scan.setMaxVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -2908,7 +2909,7 @@ public class TestHRegion {
     scan.addColumn(fam1, qf2);
     int versions = 3;
     scan.setMaxVersions(versions);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -2969,7 +2970,7 @@ public class TestHRegion {
     Scan scan = new Scan(row1);
     scan.addFamily(fam1);
     scan.setMaxVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -3030,7 +3031,7 @@ public class TestHRegion {
     Scan scan = new Scan(row1);
     scan.addFamily(fam1);
     scan.setMaxVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -3085,7 +3086,7 @@ public class TestHRegion {
     scan.addColumn(family, col1);
     InternalScanner s = region.getScanner(scan);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     assertEquals(false, NextState.hasMoreValues(s.next(results)));
     assertEquals(0, results.size());
   } finally {
@@ -3157,7 +3158,7 @@ public class TestHRegion {
     Scan scan = new Scan(row1);
     int versions = 3;
     scan.setMaxVersions(versions);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);

     boolean hasNext = NextState.hasMoreValues(scanner.next(actual));
@@ -3222,7 +3223,7 @@ public class TestHRegion {
     scan.setLoadColumnFamiliesOnDemand(true);
     InternalScanner s = region.getScanner(scan);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     assertTrue(NextState.hasMoreValues(s.next(results)));
     assertEquals(results.size(), 1);
     results.clear();
@@ -3274,7 +3275,7 @@ public class TestHRegion {
     scan.setLoadColumnFamiliesOnDemand(true);
     Filter bogusFilter = new FilterBase() {
       @Override
-      public ReturnCode filterKeyValue(Cell ignored) throws IOException {
+      public ReturnCode filterKeyValue(ServerCell ignored) throws IOException {
         return ReturnCode.INCLUDE;
       }
       @Override
@@ -3315,7 +3316,7 @@ public class TestHRegion {
     // r8: first:a
     // r9: first:a

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
     int index = 0;
     while (true) {
       boolean more = NextState.hasMoreValues(s.next(results, 3));
@@ -3568,7 +3569,7 @@ public class TestHRegion {
         new BinaryComparator(Bytes.toBytes(5L))));
     int expectedCount = 0;
-    List<Cell> res = new ArrayList<Cell>();
+    List<ServerCell> res = new ArrayList<ServerCell>();

     boolean toggle = true;
     for (long i = 0; i < numRows; i++) {
@@ -3701,7 +3702,7 @@ public class TestHRegion {
     Scan scan = new Scan(Bytes.toBytes("row0"), Bytes.toBytes("row1"));

     int expectedCount = numFamilies * numQualifiers;
-    List<Cell> res = new ArrayList<Cell>();
+    List<ServerCell> res = new ArrayList<ServerCell>();

     long prevTimestamp = 0L;
     for (int i = 0; i < testCount; i++) {
@@ -3997,7 +3998,7 @@ public class TestHRegion {
         new BinaryComparator(Bytes.toBytes(0L))),
         new SingleColumnValueFilter(family, qual1, CompareOp.LESS_OR_EQUAL,
             new BinaryComparator(Bytes.toBytes(3L))))));
     InternalScanner scanner = region.getScanner(idxScan);
-    List<Cell> res = new ArrayList<Cell>();
+    List<ServerCell> res = new ArrayList<ServerCell>();

     while (NextState.hasMoreValues(scanner.next(res)))
       ;
@@ -4893,7 +4894,7 @@ public class TestHRegion {
       scan.addFamily(families[i]);
       InternalScanner s = r.getScanner(scan);
       try {
-        List<Cell> curVals = new ArrayList<Cell>();
+        List<ServerCell> curVals = new ArrayList<ServerCell>();
         boolean first = true;
         OUTER_LOOP: while (NextState.hasMoreValues(s.next(curVals))) {
           for (Cell kv : curVals) {
@@ -5090,7 +5091,7 @@ public class TestHRegion {
     scan.setMaxVersions(5);
     scan.setReversed(true);
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     boolean hasNext = NextState.hasMoreValues(scanner.next(currRow));
     assertEquals(2, currRow.size());
     assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
@@ -5144,7 +5145,7 @@ public class TestHRegion {
     region.put(put);
     Scan scan = new Scan(rowD);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     scan.setReversed(true);
     scan.setMaxVersions(5);
     InternalScanner scanner = region.getScanner(scan);
@@ -5199,7 +5200,7 @@ public class TestHRegion {
     put.add(kv3);
     region.put(put);
     Scan scan = new Scan();
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     scan.setReversed(true);
     InternalScanner scanner = region.getScanner(scan);
     boolean hasNext = NextState.hasMoreValues(scanner.next(currRow));
@@ -5268,7 +5269,7 @@ public class TestHRegion {
     Scan scan = new Scan(rowD, rowA);
     scan.addColumn(families[0], col1);
     scan.setReversed(true);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);
     boolean hasNext = NextState.hasMoreValues(scanner.next(currRow));
     assertEquals(1, currRow.size());
@@ -5348,7 +5349,7 @@ public class TestHRegion {
     Scan scan = new Scan(rowD, rowA);
     scan.addColumn(families[0], col1);
     scan.setReversed(true);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     InternalScanner scanner = region.getScanner(scan);
     boolean hasNext = NextState.hasMoreValues(scanner.next(currRow));
     assertEquals(1, currRow.size());
@@ -5490,7 +5491,7 @@ public class TestHRegion {
     scan.setBatch(3);
     scan.setReversed(true);
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     boolean hasNext = false;
     // 1. scan out "row4" (5 kvs), "row5" can't be scanned out since not
     // included in scan range
@@ -5590,7 +5591,7 @@ public class TestHRegion {
     scan.setReversed(true);
     scan.setBatch(10);
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     boolean hasNext = NextState.hasMoreValues(scanner.next(currRow));
     assertEquals(1, currRow.size());
     assertTrue(Bytes.equals(currRow.get(0).getRow(), row4));
@@ -5655,7 +5656,7 @@ public class TestHRegion {
     Scan scan = new Scan(Bytes.toBytes(String.valueOf(startRow + 10 * numRows)));
     scan.setReversed(true);
     InternalScanner scanner = regions[1].getScanner(scan);
-    List<Cell> currRow = new ArrayList<Cell>();
+    List<ServerCell> currRow = new ArrayList<ServerCell>();
     boolean more = false;
     int verify = startRow + 2 * numRows - 1;
     do {
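All the reversed-scan cases above share one shape: the Scan's start row is the highest row expected back, setReversed(true) flips the traversal, and each next() call returns one row while walking toward lower keys. Condensed (a sketch; region and the row name stand in for the test fixtures):

    Scan scan = new Scan(Bytes.toBytes("rowD")); // scanning starts here...
    scan.setReversed(true);                      // ...and proceeds toward lower rows
    InternalScanner scanner = region.getScanner(scan);
    List<ServerCell> currRow = new ArrayList<ServerCell>();
    boolean hasNext = NextState.hasMoreValues(scanner.next(currRow)); // cells of "rowD"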
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index c480cd8..0b4ca3b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -212,7 +213,7 @@ public class TestKeepDeletes {
     s.setRaw(true);
     s.setMaxVersions();
     InternalScanner scan = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     assertEquals(2, kvs.size());
@@ -226,7 +227,7 @@ public class TestKeepDeletes {
     s.setRaw(true);
     s.setMaxVersions();
     scan = region.getScanner(s);
-    kvs = new ArrayList<Cell>();
+    kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     assertTrue(kvs.isEmpty());
@@ -263,7 +264,7 @@ public class TestKeepDeletes {
     s.setMaxVersions();
     s.setTimeRange(0L, ts+1);
     InternalScanner scanner = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     while (NextState.hasMoreValues(scanner.next(kvs)));
     assertTrue(kvs.isEmpty());
@@ -337,7 +338,7 @@ public class TestKeepDeletes {
     s.setRaw(true);
     s.setMaxVersions();
     InternalScanner scan = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     assertEquals(8, kvs.size());
     assertTrue(CellUtil.isDeleteFamily(kvs.get(0)));
@@ -356,7 +357,7 @@ public class TestKeepDeletes {
     s.setMaxVersions();
     s.setTimeRange(0, 1);
     scan = region.getScanner(s);
-    kvs = new ArrayList<Cell>();
+    kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     // nothing in this interval, not even delete markers
     assertTrue(kvs.isEmpty());
@@ -367,7 +368,7 @@ public class TestKeepDeletes {
     s.setMaxVersions();
     s.setTimeRange(0, ts+2);
     scan = region.getScanner(s);
-    kvs = new ArrayList<Cell>();
+    kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     assertEquals(4, kvs.size());
     assertTrue(CellUtil.isDeleteFamily(kvs.get(0)));
@@ -382,7 +383,7 @@ public class TestKeepDeletes {
     s.setMaxVersions();
     s.setTimeRange(ts+3, ts+5);
     scan = region.getScanner(s);
-    kvs = new ArrayList<Cell>();
+    kvs = new ArrayList<ServerCell>();
     scan.next(kvs);
     assertEquals(2, kvs.size());
     assertArrayEquals(CellUtil.cloneValue(kvs.get(0)), T3);
@@ -785,7 +786,7 @@ public class TestKeepDeletes {
     Scan s = new Scan(T1);
     s.setTimeRange(0, ts+1);
     InternalScanner scanner = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     scanner.next(kvs);
     assertEquals(4, kvs.size());
     scanner.close();
@@ -793,7 +794,7 @@ public class TestKeepDeletes {
     s = new Scan(T2);
     s.setTimeRange(0, ts+2);
     scanner = region.getScanner(s);
-    kvs = new ArrayList<Cell>();
+    kvs = new ArrayList<ServerCell>();
     scanner.next(kvs);
     assertEquals(4, kvs.size());
     scanner.close();
@@ -942,7 +943,7 @@ public class TestKeepDeletes {
     // use max versions from the store(s)
     s.setMaxVersions(region.getStores().iterator().next().getScanInfo().getMaxVersions());
     InternalScanner scan = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<Cell>();
+    List<ServerCell> kvs = new ArrayList<ServerCell>();
     int res = 0;
     boolean hasMore;
     do {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
index 86a15ff..d96fcaf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
@@ -24,9 +24,9 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -75,18 +75,18 @@ public class TestKeyValueHeap extends HBaseTestCase {
     //1. The "smallest" KeyValue is in the same scanners as current
     //2. Current scanner gets empty

-    List<Cell> l1 = new ArrayList<Cell>();
+    List<ServerCell> l1 = new ArrayList<ServerCell>();
     l1.add(new KeyValue(row1, fam1, col5, data));
     l1.add(new KeyValue(row2, fam1, col1, data));
     l1.add(new KeyValue(row2, fam1, col2, data));
     scanners.add(new Scanner(l1));

-    List<Cell> l2 = new ArrayList<Cell>();
+    List<ServerCell> l2 = new ArrayList<ServerCell>();
     l2.add(new KeyValue(row1, fam1, col1, data));
     l2.add(new KeyValue(row1, fam1, col2, data));
     scanners.add(new Scanner(l2));

-    List<Cell> l3 = new ArrayList<Cell>();
+    List<ServerCell> l3 = new ArrayList<ServerCell>();
     l3.add(new KeyValue(row1, fam1, col3, data));
     l3.add(new KeyValue(row1, fam1, col4, data));
     l3.add(new KeyValue(row1, fam2, col1, data));
@@ -110,7 +110,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
     KeyValueHeap kvh =
       new KeyValueHeap(scanners, KeyValue.COMPARATOR);

-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     while(kvh.peek() != null){
       actual.add(kvh.next());
     }
@@ -138,18 +138,18 @@ public class TestKeyValueHeap extends HBaseTestCase {
     //1. Seek KeyValue that is not in scanner
     //2. Check that smallest that is returned from a seek is correct

-    List<Cell> l1 = new ArrayList<Cell>();
+    List<ServerCell> l1 = new ArrayList<ServerCell>();
     l1.add(new KeyValue(row1, fam1, col5, data));
     l1.add(new KeyValue(row2, fam1, col1, data));
     l1.add(new KeyValue(row2, fam1, col2, data));
     scanners.add(new Scanner(l1));

-    List<Cell> l2 = new ArrayList<Cell>();
+    List<ServerCell> l2 = new ArrayList<ServerCell>();
     l2.add(new KeyValue(row1, fam1, col1, data));
     l2.add(new KeyValue(row1, fam1, col2, data));
     scanners.add(new Scanner(l2));

-    List<Cell> l3 = new ArrayList<Cell>();
+    List<ServerCell> l3 = new ArrayList<ServerCell>();
     l3.add(new KeyValue(row1, fam1, col3, data));
     l3.add(new KeyValue(row1, fam1, col4, data));
     l3.add(new KeyValue(row1, fam2, col1, data));
@@ -167,7 +167,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
     KeyValue seekKv = new KeyValue(row2, fam1, null, null);
     kvh.seek(seekKv);

-    List<Cell> actual = new ArrayList<Cell>();
+    List<ServerCell> actual = new ArrayList<ServerCell>();
     actual.add(kvh.peek());

     assertEquals(expected.size(), actual.size());
@@ -185,18 +185,18 @@ public class TestKeyValueHeap extends HBaseTestCase {
   public void testScannerLeak() throws IOException {
     // Test for unclosed scanners (HBASE-1927)

-    List<Cell> l1 = new ArrayList<Cell>();
+    List<ServerCell> l1 = new ArrayList<ServerCell>();
     l1.add(new KeyValue(row1, fam1, col5, data));
     l1.add(new KeyValue(row2, fam1, col1, data));
     l1.add(new KeyValue(row2, fam1, col2, data));
     scanners.add(new Scanner(l1));

-    List<Cell> l2 = new ArrayList<Cell>();
+    List<ServerCell> l2 = new ArrayList<ServerCell>();
     l2.add(new KeyValue(row1, fam1, col1, data));
     l2.add(new KeyValue(row1, fam1, col2, data));
     scanners.add(new Scanner(l2));

-    List<Cell> l3 = new ArrayList<Cell>();
+    List<ServerCell> l3 = new ArrayList<ServerCell>();
     l3.add(new KeyValue(row1, fam1, col3, data));
     l3.add(new KeyValue(row1, fam1, col4, data));
     l3.add(new KeyValue(row1, fam2, col1, data));
@@ -204,7 +204,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
     l3.add(new KeyValue(row2, fam1, col3, data));
     scanners.add(new Scanner(l3));

-    List<Cell> l4 = new ArrayList<Cell>();
+    List<ServerCell> l4 = new ArrayList<ServerCell>();
     scanners.add(new Scanner(l4));

     //Creating KeyValueHeap
@@ -218,11 +218,11 @@ public class TestKeyValueHeap extends HBaseTestCase {
   }

   private static class Scanner extends CollectionBackedScanner {
-    private Iterator<Cell> iter;
-    private Cell current;
+    private Iterator<ServerCell> iter;
+    private ServerCell current;
     private boolean closed = false;

-    public Scanner(List<Cell> list) {
+    public Scanner(List<ServerCell> list) {
       super(list);
     }
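KeyValueHeap is the merge structure these tests drive: given several sorted scanners, peek() exposes the globally smallest current cell and next() consumes it, so draining the heap yields one fully sorted stream. The drain loop in isolation (a sketch; scanners is the list of CollectionBackedScanner-based test scanners built above, the enclosing method is assumed to throw IOException, and next() is typed to ServerCell on this branch, as the retyped 'actual' list shows):

    KeyValueHeap kvh = new KeyValueHeap(scanners, KeyValue.COMPARATOR);
    List<ServerCell> actual = new ArrayList<ServerCell>();
    while (kvh.peek() != null) {
      actual.add(kvh.next()); // cells emerge in KeyValue.COMPARATOR order
    }
    kvh.close();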
    InternalScanner s = r.getScanner(new Scan());
    do {
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      boolean result = NextState.hasMoreValues(s.next(results));
      r.delete(new Delete(CellUtil.cloneRow(results.get(0))));
      if (!result) break;
@@ -144,7 +145,7 @@ public class TestMajorCompaction {
    s = r.getScanner(new Scan());
    int counter = 0;
    do {
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      boolean result = NextState.hasMoreValues(s.next(results));
      if (!result) break;
      counter++;
@@ -455,7 +456,7 @@ public class TestMajorCompaction {
    scan.setReversed(true);
    InternalScanner s = r.getScanner(scan);
    do {
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      boolean result = NextState.hasMoreValues(s.next(results));
      assertTrue(!results.isEmpty());
      r.delete(new Delete(results.get(0).getRow()));
@@ -471,7 +472,7 @@ public class TestMajorCompaction {
    s = r.getScanner(scan);
    int counter = 0;
    do {
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      boolean result = NextState.hasMoreValues(s.next(results));
      if (!result) break;
      counter++;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index 28d3ab9..2240217 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
@@ -243,7 +244,7 @@ public class TestMultiColumnScanner {
    }

    InternalScanner scanner = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    int kvPos = 0;
    int numResults = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
index 4a2f9ba..21d55eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
@@ -31,7 +31,6 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.CoordinatedStateManagerFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
@@ -424,7 +424,7 @@ public class TestRegionMergeTransaction {
    int rowcount = 0;
    InternalScanner scanner = r.getScanner(new Scan());
    try {
-      List<Cell> kvs = new ArrayList<Cell>();
+      List<ServerCell> kvs = new ArrayList<ServerCell>();
      boolean hasNext = true;
      while (hasNext) {
        hasNext = NextState.hasMoreValues(scanner.next(kvs));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
index 92915e6..c7d5f25 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
@@ -33,7 +33,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -429,7 +429,7 @@ public class TestReversibleScanners {
  private void verifyCountAndOrder(InternalScanner scanner,
      int expectedKVCount, int expectedRowCount, boolean forward) throws IOException {
-    List<Cell> kvList = new ArrayList<Cell>();
+    List<ServerCell> kvList = new ArrayList<ServerCell>();
    Result lastResult = null;
    int rowCount = 0;
    int kvCount = 0;
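Reviewer note (not part of the patch): the reversed-scan assertions exercised in TestMajorCompaction and TestReversibleScanners above reduce to the following client-side sketch. Table, family, and row names are placeholders; `Scan.setReversed`, `ResultScanner`, and `Bytes.compareTo` are stock HBase client API.

```java
import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ReversedScanSketch {
  // Walk a table backwards and check that row keys arrive in descending order.
  static void scanBackwards(Table table) throws IOException {
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("f")); // placeholder family
    scan.setReversed(true);             // rows come back largest-first
    try (ResultScanner rs = table.getScanner(scan)) {
      byte[] prevRow = null;
      for (Result r : rs) {
        if (prevRow != null && Bytes.compareTo(r.getRow(), prevRow) >= 0) {
          throw new IllegalStateException("rows not in descending order");
        }
        prevRow = r.getRow();
      }
    }
  }
}
```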
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
index ccae4bc..bbc993c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -164,7 +165,7 @@ public class TestScanWithBloomError {
    List<Cell> allResults = new ArrayList<Cell>();

    { // Limit the scope of results.
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      while (NextState.hasMoreValues(scanner.next(results)) || results.size() > 0) {
        allResults.addAll(results);
        results.clear();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index ee72ce4..c39d557 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -33,7 +33,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestCase.HRegionIncommon;
@@ -43,6 +42,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Delete;
@@ -130,7 +130,7 @@ public class TestScanner {
    try {
      this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
      HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      // Do simple test of getting one row only first.
      Scan scan = new Scan(Bytes.toBytes("abc"), Bytes.toBytes("abd"));
      scan.addFamily(HConstants.CATALOG_FAMILY);
@@ -148,17 +148,17 @@ public class TestScanner {
      s = r.getScanner(scan);
      count = 0;
-      Cell kv = null;
-      results = new ArrayList<Cell>();
+      ServerCell cell = null;
+      results = new ArrayList<ServerCell>();
      for (boolean first = true; NextState.hasMoreValues(s.next(results));) {
-        kv = results.get(0);
+        cell = results.get(0);
        if (first) {
-          assertTrue(CellUtil.matchingRow(kv, startrow));
+          assertTrue(CellUtil.matchingRow(cell, startrow));
          first = false;
        }
        count++;
      }
-      assertTrue(Bytes.BYTES_COMPARATOR.compare(stoprow, CellUtil.cloneRow(kv)) > 0);
+      assertTrue(Bytes.BYTES_COMPARATOR.compare(stoprow, CellUtil.cloneRow(cell)) > 0);
      // We got something back.
      assertTrue(count > 10);
      s.close();
@@ -168,15 +168,15 @@ public class TestScanner {
  }

  void rowPrefixFilter(Scan scan) throws IOException {
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    scan.addFamily(HConstants.CATALOG_FAMILY);
    InternalScanner s = r.getScanner(scan);
    boolean hasMore = true;
    while (hasMore) {
      hasMore = NextState.hasMoreValues(s.next(results));
-      for (Cell kv : results) {
-        assertEquals((byte)'a', CellUtil.cloneRow(kv)[0]);
-        assertEquals((byte)'b', CellUtil.cloneRow(kv)[1]);
+      for (ServerCell cell : results) {
+        assertEquals((byte)'a', CellUtil.cloneRow(cell)[0]);
+        assertEquals((byte)'b', CellUtil.cloneRow(cell)[1]);
      }
      results.clear();
    }
@@ -184,13 +184,13 @@ public class TestScanner {
  }

  void rowInclusiveStopFilter(Scan scan, byte[] stopRow) throws IOException {
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    scan.addFamily(HConstants.CATALOG_FAMILY);
    InternalScanner s = r.getScanner(scan);
    boolean hasMore = true;
    while (hasMore) {
      hasMore = NextState.hasMoreValues(s.next(results));
-      for (Cell kv : results) {
+      for (ServerCell kv : results) {
        assertTrue(Bytes.compareTo(CellUtil.cloneRow(kv), stopRow) <= 0);
      }
      results.clear();
@@ -232,7 +232,7 @@ public class TestScanner {
      HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
      Scan scan = new Scan();
      InternalScanner s = r.getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      try {
        s.next(results);
        s.close();
@@ -376,7 +376,7 @@ public class TestScanner {
      throws IOException {
    InternalScanner scanner = null;
    Scan scan = null;
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    byte [][][] scanColumns = {
        COLS,
        EXPLICIT_COLS
@@ -427,9 +427,9 @@ public class TestScanner {
    }
  }

-  private boolean hasColumn(final List<Cell> kvs, final byte [] family,
+  private boolean hasColumn(final List<ServerCell> kvs, final byte [] family,
      final byte [] qualifier) {
-    for (Cell kv: kvs) {
+    for (ServerCell kv: kvs) {
      if (CellUtil.matchingFamily(kv, family) && CellUtil.matchingQualifier(kv, qualifier)) {
        return true;
      }
@@ -437,9 +437,9 @@ public class TestScanner {
    return false;
  }

-  private Cell getColumn(final List<Cell> kvs, final byte [] family,
+  private ServerCell getColumn(final List<ServerCell> kvs, final byte [] family,
      final byte [] qualifier) {
-    for (Cell kv: kvs) {
+    for (ServerCell kv: kvs) {
      if (CellUtil.matchingFamily(kv, family) && CellUtil.matchingQualifier(kv, qualifier)) {
        return kv;
      }
@@ -538,7 +538,7 @@ public class TestScanner {
    // run a major compact, column1 of firstRow will be cleaned.
    r.compact(true);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    s.next(results);

    // make sure returns column2 of firstRow
@@ -547,7 +547,7 @@ public class TestScanner {
    assertTrue(CellUtil.matchingRow(results.get(0), firstRowBytes));
    assertTrue(CellUtil.matchingFamily(results.get(0), fam2));

-    results = new ArrayList<Cell>();
+    results = new ArrayList<ServerCell>();
    s.next(results);

    // get secondRow
@@ -574,7 +574,7 @@ public class TestScanner {
      LOG.info("Taking out counting scan");
      ScannerIncommon s = hri.getScanner(HConstants.CATALOG_FAMILY, EXPLICIT_COLS,
          HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP);
-      List<Cell> values = new ArrayList<Cell>();
+      List<ServerCell> values = new ArrayList<ServerCell>();
      int count = 0;
      boolean justFlushed = false;
      while (s.next(values)) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index dfced3b..4b5bd68 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
@@ -217,8 +218,8 @@ public class TestSeekOptimizations {
    final long initialSeekCount = StoreFileScanner.getSeekCount();
    final InternalScanner scanner = region.getScanner(scan);
-    final List<Cell> results = new ArrayList<Cell>();
-    final List<Cell> actualKVs = new ArrayList<Cell>();
+    final List<ServerCell> results = new ArrayList<ServerCell>();
+    final List<ServerCell> actualKVs = new ArrayList<ServerCell>();

    // Such a clumsy do-while loop appears to be the official way to use an
    // internalScanner. scanner.next() return value refers to the _next_
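Reviewer note (not part of the patch): the "clumsy do-while loop" called out in the comment above is the standard way to drain an InternalScanner, because next() reports whether a further batch exists rather than whether this call produced cells. A minimal sketch against the post-patch server-side types (ServerCell, InternalScanner, NextState — all internal to this series), assuming an already-opened HRegion:

```java
// Minimal sketch, assuming the post-patch server-side API; not runnable
// outside the HBase source tree.
static List<ServerCell> drainScanner(HRegion region, Scan scan) throws IOException {
  List<ServerCell> batch = new ArrayList<ServerCell>();
  List<ServerCell> allCells = new ArrayList<ServerCell>();
  InternalScanner scanner = region.getScanner(scan);
  try {
    boolean hasNext;
    do {
      // next() reports whether a *further* batch exists, not whether this
      // call produced cells, so the batch must be consumed every pass.
      hasNext = NextState.hasMoreValues(scanner.next(batch));
      allCells.addAll(batch);
      batch.clear();
    } while (hasNext);
  } finally {
    scanner.close();
  }
  return allCells;
}
```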
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index 0ed7645..0f9c498 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -35,13 +35,13 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
@@ -357,7 +357,7 @@ public class TestSplitTransaction {
    int rowcount = 0;
    InternalScanner scanner = r.getScanner(new Scan());
    try {
-      List<Cell> kvs = new ArrayList<Cell>();
+      List<ServerCell> kvs = new ArrayList<ServerCell>();
      boolean hasNext = true;
      while (hasNext) {
        hasNext = NextState.hasMoreValues(scanner.next(kvs));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 59793e0..81bb868 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableName;
@@ -117,7 +118,7 @@ public class TestStore {
      new ConcurrentSkipListSet<byte[]>(Bytes.BYTES_COMPARATOR);

  List<Cell> expected = new ArrayList<Cell>();
-  List<Cell> result = new ArrayList<Cell>();
+  List<ServerCell> result = new ArrayList<ServerCell>();
  long id = System.currentTimeMillis();
  Get get = new Get(row);
@@ -599,7 +600,7 @@ public class TestStore {
    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions(); // all versions.
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    Assert.assertEquals(2, results.size());
@@ -667,7 +668,7 @@ public class TestStore {
    }
    long computedSize=0;
-    for (Cell cell : ((DefaultMemStore)this.store.memstore).cellSet) {
+    for (ServerCell cell : ((DefaultMemStore)this.store.memstore).cellSet) {
      long kvsize = DefaultMemStore.heapSizeChange(cell, true);
      //System.out.println(kv + " size= " + kvsize + " kvsize= " + kv.heapSize());
      computedSize += kvsize;
@@ -716,7 +717,7 @@ public class TestStore {
    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions(); // all versions.
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    Assert.assertEquals(2, results.size());
@@ -908,7 +909,7 @@ public class TestStore {
      this.store.add(KeyValueUtil.ensureKeyValue(kv));
    }

-    List<Cell> result;
+    List<ServerCell> result;
    Get get = new Get(Bytes.toBytes(1));
    get.addColumn(family,qf1);
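Reviewer note (not part of the patch): several TestStore hunks above rely on `Get.setMaxVersions()` with no argument, which lifts the default cap of one version per column. A client-side sketch with placeholder row/family/qualifier names:

```java
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AllVersionsGetSketch {
  // Fetch every retained version of one column; "row", "f", "q" are placeholders.
  static List<Cell> readAllVersions(Table table) throws IOException {
    Get get = new Get(Bytes.toBytes("row"));
    get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));
    get.setMaxVersions(); // all versions, not just the newest
    Result result = table.get(get);
    return result.listCells(); // versions of the column, newest first
  }
}
```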
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index c723c2e..263ab3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -30,11 +30,11 @@ import java.util.TreeSet;

 import junit.framework.TestCase;

-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.InternalScanner.NextState;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -90,7 +90,7 @@ public class TestStoreScanner extends TestCase {
    scanSpec.setMaxVersions();
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(5, results.size());
    assertEquals(kvs[kvs.length - 1], results.get(0));
@@ -100,7 +100,7 @@ public class TestStoreScanner extends TestCase {
    scanSpec.setMaxVersions();
    scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);
-    results = new ArrayList<Cell>();
+    results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(2, results.size());
    // Another range.
@@ -109,7 +109,7 @@ public class TestStoreScanner extends TestCase {
    scanSpec.setMaxVersions();
    scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);
-    results = new ArrayList<Cell>();
+    results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(1, results.size());
    // See how TimeRange and Versions interact.
@@ -119,7 +119,7 @@ public class TestStoreScanner extends TestCase {
    scanSpec.setMaxVersions(3);
    scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);
-    results = new ArrayList<Cell>();
+    results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(3, results.size());
  }
@@ -140,7 +140,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(1, results.size());
    assertEquals(kvs[0], results.get(0));
@@ -167,7 +167,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    scan.next(results);
    assertEquals(1, results.size());
    assertEquals(kvs[0], results.get(0));
@@ -195,7 +195,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertFalse(NextState.hasMoreValues(scan.next(results)));
    assertEquals(0, results.size());
  }
@@ -215,7 +215,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(0, results.size());

@@ -241,7 +241,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(new Scan(Bytes.toBytes("R1")), scanInfo, scanType,
        getCols("a"), scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    // the two put at ts=now will be masked by the 1 delete, and
    // since the scan default returns 1 version we'll return the newest
    // key, which is kvs[2], now-100.
@@ -266,7 +266,7 @@ public class TestStoreScanner extends TestCase {
    Scan scanSpec = new Scan(Bytes.toBytes("R1")).setMaxVersions(2);
    StoreScanner scan = new StoreScanner(scanSpec, scanInfo, scanType, getCols("a"),
        scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(2, results.size());
    assertEquals(kvs2[1], results.get(0));
@@ -282,7 +282,7 @@ public class TestStoreScanner extends TestCase {
    List<KeyValueScanner> scanners = scanFixture(kvs);
    StoreScanner scan = new StoreScanner(new Scan(Bytes.toBytes("R1")), scanInfo, scanType,
        null, scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(2, results.size());
    assertEquals(kvs[0], results.get(0));
@@ -311,7 +311,7 @@ public class TestStoreScanner extends TestCase {
    List<KeyValueScanner> scanners = scanFixture(kvs);
    StoreScanner scan = new StoreScanner(new Scan().setMaxVersions(2), scanInfo, scanType,
        null, scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(5, results.size());
    assertEquals(kvs[0], results.get(0));
@@ -340,7 +340,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(
        new Scan().setMaxVersions(Integer.MAX_VALUE), scanInfo, scanType, null,
        scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(0, results.size());
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
@@ -360,7 +360,7 @@ public class TestStoreScanner extends TestCase {
    List<KeyValueScanner> scanners = scanFixture(kvs);
    StoreScanner scan = new StoreScanner(new Scan(), scanInfo, scanType, null,
        scanners);
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(1, results.size());
    assertEquals(kvs[3], results.get(0));
@@ -384,7 +384,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scan = new StoreScanner(new Scan(), scanInfo, scanType,
        getCols("a", "d"), scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scan.next(results)));
    assertEquals(2, results.size());
    assertEquals(kvs[0], results.get(0));
@@ -425,7 +425,7 @@ public class TestStoreScanner extends TestCase {
        new StoreScanner(scan, scanInfo, scanType, null, scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scanner.next(results)));
    assertEquals(2, results.size());
    assertEquals(kvs[1], results.get(0));
@@ -495,7 +495,7 @@ public class TestStoreScanner extends TestCase {
    StoreScanner scanner =
        new StoreScanner(scan, scanInfo, scanType, null, scanners);

-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scanner.next(results)));
    assertEquals(1, results.size());
    assertEquals(kvs[1], results.get(0));
@@ -559,8 +559,7 @@ public class TestStoreScanner extends TestCase {
        new StoreScanner(scan, scanInfo, ScanType.COMPACT_DROP_DELETES, null, scanners,
            HConstants.OLDEST_TIMESTAMP);

-    List<Cell> results = new ArrayList<Cell>();
-    results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    assertEquals(true, NextState.hasMoreValues(scanner.next(results)));
    assertEquals(kvs[0], results.get(0));
    assertEquals(kvs[2], results.get(1));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
index 06bbd54..001f5cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
@@ -41,12 +41,12 @@ import java.util.TreeMap;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -237,7 +237,7 @@ public class TestStripeCompactor {
    }

    @Override
-    public NextState next(List<Cell> results) throws IOException {
+    public NextState next(List<ServerCell> results) throws IOException {
      if (kvs.isEmpty()) return NextState.makeState(NextState.State.NO_MORE_VALUES);
      results.add(kvs.remove(0));
      if (!kvs.isEmpty()) {
@@ -247,12 +247,12 @@ public class TestStripeCompactor {
      }
    }

    @Override
-    public NextState next(List<Cell> result, int limit) throws IOException {
+    public NextState next(List<ServerCell> result, int limit) throws IOException {
      return next(result);
    }

    @Override
-    public NextState next(List<Cell> result, int limit, long remainingResultSize)
+    public NextState next(List<ServerCell> result, int limit, long remainingResultSize)
        throws IOException {
      return next(result);
    }
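Reviewer note (not part of the patch): the mock scanners in TestStripeCompactor above and TestStripeCompactionPolicy below follow one pattern — back the scanner with a canned cell list and let the limit-taking next() overloads delegate to the basic one. A condensed sketch; NextState.State.MORE_VALUES is assumed here as the counterpart of the NO_MORE_VALUES constant the hunks use:

```java
// Sketch only; InternalScanner, NextState, and ServerCell are the
// server-side types this patch series works against.
private static class ListBackedScanner implements InternalScanner {
  private final List<ServerCell> kvs;

  ListBackedScanner(List<ServerCell> cells) {
    this.kvs = new ArrayList<ServerCell>(cells);
  }

  @Override
  public NextState next(List<ServerCell> results) throws IOException {
    if (kvs.isEmpty()) {
      return NextState.makeState(NextState.State.NO_MORE_VALUES);
    }
    results.add(kvs.remove(0)); // hand out one cell per call
    return kvs.isEmpty()
        ? NextState.makeState(NextState.State.NO_MORE_VALUES)
        : NextState.makeState(NextState.State.MORE_VALUES); // assumed constant
  }

  @Override
  public NextState next(List<ServerCell> results, int limit) throws IOException {
    return next(results); // limits are irrelevant for a canned fixture
  }

  @Override
  public NextState next(List<ServerCell> results, int limit, long remainingResultSize)
      throws IOException {
    return next(results);
  }

  @Override
  public void close() throws IOException {
    // nothing to release
  }
}
```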
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index 303ee36..68b5255 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
@@ -94,7 +95,7 @@ public class TestWideScanner extends HBaseTestCase {
    try {
      this.r = createNewHRegion(TESTTABLEDESC, null, null);
      int inserted = addWideContent(this.r);
-      List<Cell> results = new ArrayList<Cell>();
+      List<ServerCell> results = new ArrayList<ServerCell>();
      Scan scan = new Scan();
      scan.addFamily(A);
      scan.addFamily(B);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index 3294f6d..dbc7292 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -44,11 +44,11 @@ import java.util.List;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -774,7 +774,7 @@ public class TestStripeCompactionPolicy {
    }

    @Override
-    public NextState next(List<Cell> results) throws IOException {
+    public NextState next(List<ServerCell> results) throws IOException {
      if (kvs.isEmpty()) return NextState.makeState(NextState.State.NO_MORE_VALUES);
      results.add(kvs.remove(0));
@@ -786,12 +786,12 @@ public class TestStripeCompactionPolicy {
    }

    @Override
-    public NextState next(List<Cell> result, int limit) throws IOException {
+    public NextState next(List<ServerCell> result, int limit) throws IOException {
      return next(result);
    }

    @Override
-    public NextState next(List<Cell> result, int limit, long remainingResultSize)
+    public NextState next(List<ServerCell> result, int limit, long remainingResultSize)
        throws IOException {
      return next(result);
    }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index afdcdc7..6cb1102 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
@@ -749,7 +750,7 @@ public class TestWALReplay {
  private int getScannedCount(RegionScanner scanner) throws IOException {
    int scannedCount = 0;
-    List<Cell> results = new ArrayList<Cell>();
+    List<ServerCell> results = new ArrayList<ServerCell>();
    while (true) {
      boolean existMore = NextState.hasMoreValues(scanner.next(results));
      if (!results.isEmpty())
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
index 0ec410e..65ee24d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
@@ -28,13 +28,13 @@ import java.util.Random;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -434,7 +434,7 @@ public class TestMasterReplication {

    @Override
    public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> c,
-        final Get get, final List<Cell> result) throws IOException {
+        final Get get, final List<ServerCell> result) throws IOException {
      if (get.getAttribute("count") != null) {
        result.clear();
        // order is important!
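Reviewer note (not part of the patch): after this change, get-time RegionObserver hooks receive List&lt;ServerCell&gt; instead of List&lt;Cell&gt;. A minimal observer in that shape, with the class name and attribute key as placeholders, plus the usual way such test observers are attached:

```java
// Sketch only; BaseRegionObserver, ObserverContext, and
// RegionCoprocessorEnvironment are the standard coprocessor types.
public static class MarkerAwareObserver extends BaseRegionObserver {
  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> c,
      final Get get, final List<ServerCell> result) throws IOException {
    // Drop any pre-filled cells when the client set a marker attribute.
    if (get.getAttribute("count") != null) {
      result.clear();
    }
  }
}

// Typical registration in a test:
//   HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t"));
//   htd.addCoprocessor(MarkerAwareObserver.class.getName());
```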
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index f1e956c..6ef77a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.client.Admin;
@@ -233,7 +234,7 @@ public class TestReplicationWithTags {

    @Override
    public void postGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
-        List<Cell> results) throws IOException {
+        List<ServerCell> results) throws IOException {
      if (results.size() > 0) {
        // Check tag presence in the 1st cell in 1st Result
        if (!results.isEmpty()) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
index 63fe418..6b663bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -154,7 +155,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    assert (labelsRegion != null || systemCall);
    List<String> auths = new ArrayList<String>();
    Get get = new Get(user);
-    List<Cell> cells = null;
+    List<ServerCell> cells = null;
    if (labelsRegion == null) {
      Table table = null;
      Connection connection = null;
@@ -191,7 +192,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    if (groups != null && groups.length > 0) {
      for (String group : groups) {
        Get get = new Get(Bytes.toBytes(AccessControlLists.toGroupEntry(group)));
-        List<Cell> cells = null;
+        List<ServerCell> cells = null;
        if (labelsRegion == null) {
          Table table = null;
          Connection connection = null;
@@ -260,7 +261,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    if (isReadFromSystemAuthUser()) {
      return new VisibilityExpEvaluator() {
        @Override
-        public boolean evaluate(Cell cell) throws IOException {
+        public boolean evaluate(ServerCell cell) throws IOException {
          return true;
        }
      };
@@ -280,7 +281,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
    final List<String> authLabelsFinal = authLabels;
    return new VisibilityExpEvaluator() {
      @Override
-      public boolean evaluate(Cell cell) throws IOException {
+      public boolean evaluate(ServerCell cell) throws IOException {
        boolean visibilityTagPresent = false;
        // Save an object allocation where we can
        if (cell.getTagsLength() > 0) {
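Reviewer note (not part of the patch): the VisibilityExpEvaluator contract now evaluates a ServerCell, typically by inspecting the cell's tags. The shape of such an evaluator, with the actual tag walking and label matching left as a hypothetical helper:

```java
// Sketch only; tag iteration and auth resolution are elided.
VisibilityExpEvaluator evaluator = new VisibilityExpEvaluator() {
  @Override
  public boolean evaluate(ServerCell cell) throws IOException {
    if (cell.getTagsLength() == 0) {
      return true; // no visibility expression stored on the cell
    }
    // Compare the cell's visibility tags against the user's authorizations.
    return matchesUserAuths(cell); // hypothetical helper
  }
};
```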
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index bc34ce3..36102c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagRewriteCell;
@@ -50,7 +51,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -433,7 +433,7 @@ public class TestVisibilityLabelsReplication {

    @Override
    public void postGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
-        List<Cell> results) throws IOException {
+        List<ServerCell> results) throws IOException {
      if (results.size() > 0) {
        // Check tag presence in the 1st cell in 1st Result
        if (!results.isEmpty()) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index c071a33..79a30ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -25,7 +25,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -33,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerCell;
 import org.apache.hadoop.hbase.TableDescriptor;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -229,9 +229,9 @@ public class TestMergeTool extends HBaseTestCase {
    scan.addFamily(FAMILY);
    InternalScanner scanner = merged.getScanner(scan);
    try {
-      List<Cell> testRes = null;
+      List<ServerCell> testRes = null;
      while (true) {
-        testRes = new ArrayList<Cell>();
+        testRes = new ArrayList<ServerCell>();
        boolean hasNext = NextState.hasMoreValues(scanner.next(testRes));
        if (!hasNext) {
          break;