diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index dd2c2f0..4ba499f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -34,11 +34,9 @@ import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -48,14 +46,14 @@ import org.apache.hadoop.hbase.security.access.AccessControlUtil; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.ClassSize; - import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput; import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ClassSize; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Public public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable, @@ -230,7 +228,8 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C if (tags != null) { List tagsString = new ArrayList<>(tags.size()); for (Tag t : tags) { - tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t))); + tagsString + .add((t.getType()) + ":" + Bytes.toStringBinary(Tag.cloneValue(t))); } stringMap.put("tag", tagsString); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index d154177..aa6b9b1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -30,7 +30,6 @@ import java.util.NavigableSet; import java.util.function.Function; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CacheEvictionStats; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; @@ -45,8 +44,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Consistency; import org.apache.hadoop.hbase.client.Delete; @@ -92,6 +89,7 @@ import 
org.apache.hadoop.hbase.util.DynamicClassLoader; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.ipc.RemoteException; +import org.apache.yetus.audience.InterfaceAudience; import com.google.protobuf.ByteString; import com.google.protobuf.CodedInputStream; @@ -524,7 +522,8 @@ public final class ProtobufUtil { .setTags(allTagsBytes) .build()); } else { - List tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length); + List tags = + Tag.asList(allTagsBytes, 0, (short) allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray)); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index bc6e427..4dcce2b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -61,7 +61,6 @@ import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.ClientUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -658,7 +657,8 @@ public final class ProtobufUtil { .setTags(allTagsBytes) .build()); } else { - List tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length); + List tags = + Tag.asList(allTagsBytes, 0, (short) allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray)); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index f5833c8..8b14ecc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -184,9 +184,11 @@ public interface Cell { /** * HBase internally uses 2 bytes to store tags length in Cell. * As the tags length is always a non-negative number, to make good use of the sign bit, - * the max of tags length is defined as {@link TagUtil#MAX_TAGS_LENGTH}, which is 2 * Short.MAX_VALUE + 1 = 65535. + * the max of tags length is defined as {@link PrivateCellUtil#MAX_TAGS_LENGTH}, + * which is 2 * Short.MAX_VALUE + 1 = 65535. * As a result, the return type is int, because a short is not capable of handling that. - * Please note that even if the return type is int, the max tags length is far less than Integer.MAX_VALUE. + * Please note that even if the return type is int, the max tags length is far + * less than Integer.MAX_VALUE. * * @return the total length of the tags in the Cell. 
*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 30283f1..4e193a5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience.Private; -import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -51,12 +50,10 @@ import org.apache.hadoop.hbase.util.Bytes; * have been marked deprecated in HBase-2.0 which will be subsequently removed in HBase-3.0 */ @InterfaceAudience.Public -public final class CellUtil { +public class CellUtil { - /** - * Private constructor to keep this class from being instantiated. - */ - private CellUtil() { + // package private + CellUtil() { } /******************* ByteRange *******************************/ @@ -593,7 +590,7 @@ public final class CellUtil { */ @Deprecated public static Cell createCell(Cell cell, List tags) { - return createCell(cell, TagUtil.fromList(tags)); + return createCell(cell, Tag.fromList(tags)); } /** @@ -1073,45 +1070,6 @@ public final class CellUtil { return estimatedSerializedSizeOf(cell); } - /********************* tags *************************************/ - /** - * Util method to iterate through the tags - * @param tags - * @param offset - * @param length - * @return iterator for the tags - * @deprecated As of 2.0.0 and will be removed in 3.0.0 Instead use - * {@link PrivateCellUtil#tagsIterator(Cell)} - */ - @Deprecated - public static Iterator tagsIterator(final byte[] tags, final int offset, final int length) { - return new Iterator() { - private int pos = offset; - private int endOffset = offset + length - 1; - - @Override - public boolean hasNext() { - return this.pos < endOffset; - } - - @Override - public Tag next() { - if (hasNext()) { - int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); - Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE); - this.pos += Bytes.SIZEOF_SHORT + curTagLen; - return tag; - } - return null; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - /** * @param cell The Cell * @return Tags in the given Cell as a List @@ -1670,35 +1628,6 @@ public final class CellUtil { } /** - * Used when a cell needs to be compared with a key byte[] such as cases of finding the index from - * the index block, bloom keys from the bloom blocks This byte[] is expected to be serialized in - * the KeyValue serialization format If the KeyValue (Cell's) serialization format changes this - * method cannot be used. - * @param comparator the cell comparator - * @param left the cell to be compared - * @param key the serialized key part of a KeyValue - * @param offset the offset in the key byte[] - * @param length the length of the key byte[] - * @return an int greater than 0 if left is greater than right lesser than 0 if left is lesser - * than right equal to 0 if left is equal to right - * @deprecated As of HBase-2.0. 
Will be removed in HBase-3.0 - */ - @VisibleForTesting - @Deprecated - public static final int compare(CellComparator comparator, Cell left, byte[] key, int offset, - int length) { - // row - short rrowlength = Bytes.toShort(key, offset); - int c = comparator.compareRows(left, key, offset + Bytes.SIZEOF_SHORT, rrowlength); - if (c != 0) return c; - - // Compare the rest of the two KVs without making any assumptions about - // the common prefix. This function will not compare rows anyway, so we - // don't need to tell it that the common prefix includes the row. - return PrivateCellUtil.compareWithoutRow(comparator, left, key, offset, length, rrowlength); - } - - /** - * Compares the cell's family with the given byte[] - * @param left the cell for which the family has to be compared - * @param right the byte[] having the family diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java index 4d16fca..c3e5bbc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java @@ -20,13 +20,14 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.yetus.audience.InterfaceAudience; /** * Extension to {@link Cell} with server side required functions. Server side Cell implementations - * must implement this. + * must implement this. CPs accessing cells on the server side can safely assume the cells to be of type ExtendedCell. * @see SettableSequenceId * @see SettableTimestamp */ @@ -35,6 +36,8 @@ public interface ExtendedCell extends Cell, SettableSequenceId, SettableTimestam Cloneable { public static int CELL_NOT_BASED_ON_CHUNK = -1; + + static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1; /** * Write this cell to an OutputStream in a {@link KeyValue} format. *
KeyValue format
@@ -83,4 +86,40 @@ public interface ExtendedCell extends Cell, SettableSequenceId, SettableTimestam default int getChunkId() { return CELL_NOT_BASED_ON_CHUNK; } + + /** + * Allows cloning the tags in the cell to a new byte[] + * @return the byte[] having the tags + */ + default byte[] cloneTags() { + return PrivateCellUtil.cloneTags(this); + } + + /** + * Creates a list of tags in the current cell + * @return a list of tags + */ + default List getTags() { + return PrivateCellUtil.getTags(this); + } + + /** + * Returns the specific tag of the given type + * @param type the type of the tag + * @return the specific tag if available or null + */ + default Tag getTag(byte type) { + return PrivateCellUtil.getTag(this, type); + } + + /** + * Check the length of tags. If it is invalid, throw IllegalArgumentException + * @param tagsLength + * @throws IllegalArgumentException if tagslength is invalid + */ + public static void checkForTagsLength(int tagsLength) { + if (tagsLength > MAX_TAGS_LENGTH) { + throw new IllegalArgumentException("tagslength " + tagsLength + " > " + MAX_TAGS_LENGTH); + } + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java index 41d204c..dacc2f4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java @@ -26,7 +26,7 @@ import org.apache.yetus.audience.InterfaceAudience; * Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance. * TODO: ditto for ByteBufferCell? */ -@InterfaceAudience.Private +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) public interface ExtendedCellBuilder extends CellBuilder { @Override ExtendedCellBuilder setRow(final byte[] row); @@ -65,5 +65,10 @@ public interface ExtendedCellBuilder extends CellBuilder { ExtendedCellBuilder setTags(final byte[] tags); ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength); + /** + * Internal usage. 
Be careful before you use this while building a cell + * @param seqId set the seqId + * @return the current ExtendedCellBuilder + */ ExtendedCellBuilder setSequenceId(final long seqId); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java index 87f84c9..7890466 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java @@ -25,13 +25,17 @@ import org.apache.yetus.audience.InterfaceAudience; public final class ExtendedCellBuilderFactory { public static ExtendedCellBuilder create(CellBuilderType type) { + return create(type, true); + } + + public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) { switch (type) { - case SHALLOW_COPY: - return new IndividualBytesFieldCellBuilder(); - case DEEP_COPY: - return new KeyValueBuilder(); - default: - throw new UnsupportedOperationException("The type:" + type + " is unsupported"); + case SHALLOW_COPY: + return new IndividualBytesFieldCellBuilder(); + case DEEP_COPY: + return new KeyValueBuilder(allowSeqIdUpdate); + default: + throw new UnsupportedOperationException("The type:" + type + " is unsupported"); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java index a15843c..f700327 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java @@ -94,7 +94,7 @@ public class IndividualBytesFieldCell implements ExtendedCell { } // Check tags - TagUtil.checkForTagsLength(tagsLength); + ExtendedCell.checkForTagsLength(tagsLength); checkArrayBounds(row, rOffset, rLength); checkArrayBounds(family, fOffset, fLength); checkArrayBounds(qualifier, qOffset, qLength); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index c3a429e..51d083d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -34,13 +34,12 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.RawComparator; - -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.yetus.audience.InterfaceAudience; /** * An HBase Key/Value. This is the fundamental HBase Type. *

@@ -766,7 +765,7 @@ public class KeyValue implements ExtendedCell { if (qlength > Integer.MAX_VALUE - rlength - flength) { throw new IllegalArgumentException("Qualifier > " + Integer.MAX_VALUE); } - TagUtil.checkForTagsLength(tagsLength); + ExtendedCell.checkForTagsLength(tagsLength); // Key length long longkeylength = getKeyDataStructureSize(rlength, flength, qlength); if (longkeylength > Integer.MAX_VALUE) { @@ -884,7 +883,7 @@ public class KeyValue implements ExtendedCell { tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } - TagUtil.checkForTagsLength(tagsLength); + ExtendedCell.checkForTagsLength(tagsLength); int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); int keyValueLength = (int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, tagsLength); @@ -918,7 +917,7 @@ public class KeyValue implements ExtendedCell { int tlen = t.getValueLength(); pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE); pos = Bytes.putByte(buffer, pos, t.getType()); - TagUtil.copyValueTo(t, buffer, pos); + Tag.copyValueTo(t, buffer, pos); pos += tlen; } } @@ -951,7 +950,7 @@ public class KeyValue implements ExtendedCell { int vlength, byte[] tags, int tagsOffset, int tagsLength) { checkParameters(row, rlength, family, flength, qlength, vlength); - TagUtil.checkForTagsLength(tagsLength); + ExtendedCell.checkForTagsLength(tagsLength); // Allocate right-sized byte array. int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, @@ -1001,7 +1000,7 @@ public class KeyValue implements ExtendedCell { tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } - TagUtil.checkForTagsLength(tagsLength); + ExtendedCell.checkForTagsLength(tagsLength); // Allocate right-sized byte array. 
int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, @@ -1041,7 +1040,7 @@ public class KeyValue implements ExtendedCell { int tlen = t.getValueLength(); pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE); pos = Bytes.putByte(bytes, pos, t.getType()); - TagUtil.copyValueTo(t, bytes, pos); + Tag.copyValueTo(t, bytes, pos); pos += tlen; } } @@ -1530,7 +1529,7 @@ public class KeyValue implements ExtendedCell { if (tagsLength == 0) { return EMPTY_ARRAY_LIST; } - return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength); + return Tag.asList(getTagsArray(), getTagsOffset(), tagsLength); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java index 94e2522..32383c6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java @@ -21,6 +21,16 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class KeyValueBuilder extends ExtendedCellBuilderImpl { + boolean allowSeqIdUpdate = false; + + KeyValueBuilder() { + this(true); + } + + KeyValueBuilder(boolean allowSeqIdUpdate) { + this.allowSeqIdUpdate = allowSeqIdUpdate; + } + @Override protected ExtendedCell innerBuild() { KeyValue kv = new KeyValue(row, rOffset, rLength, @@ -29,7 +39,9 @@ class KeyValueBuilder extends ExtendedCellBuilderImpl { timestamp, type, value, vOffset, vLength, tags, tagsOffset, tagsLength); - kv.setSequenceId(seqId); + if (allowSeqIdUpdate) { + kv.setSequenceId(seqId); + } return kv; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index d70d974..ec6b0d2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -49,14 +49,12 @@ import com.google.common.annotations.VisibleForTesting; * rich set of APIs than those in {@link CellUtil} for internal usage. */ @InterfaceAudience.Private -// TODO : Make Tag IA.LimitedPrivate and move some of the Util methods to CP exposed Util class -public class PrivateCellUtil { +public class PrivateCellUtil extends CellUtil { /** * Private constructor to keep this class from being instantiated. */ private PrivateCellUtil() { - } /******************* ByteRange *******************************/ @@ -82,64 +80,6 @@ public class PrivateCellUtil { return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } - /** - * Returns tag value in a new byte array. If server-side, use {@link Tag#getValueArray()} with - * appropriate {@link Tag#getValueOffset()} and {@link Tag#getValueLength()} instead to save on - * allocations. - * @param cell - * @return tag value in a new byte array. 
- */ - public static byte[] getTagsArray(Cell cell) { - byte[] output = new byte[cell.getTagsLength()]; - copyTagsTo(cell, output, 0); - return output; - } - - public static byte[] cloneTags(Cell cell) { - byte[] output = new byte[cell.getTagsLength()]; - copyTagsTo(cell, output, 0); - return output; - } - - /** - * Copies the tags info into the tag portion of the cell - * @param cell - * @param destination - * @param destinationOffset - * @return position after tags - */ - public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { - int tlen = cell.getTagsLength(); - if (cell instanceof ByteBufferCell) { - ByteBufferUtils.copyFromBufferToArray(destination, - ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(), - destinationOffset, tlen); - } else { - System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset, - tlen); - } - return destinationOffset + tlen; - } - - /** - * Copies the tags info into the tag portion of the cell - * @param cell - * @param destination - * @param destinationOffset - * @return the position after tags - */ - public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { - int tlen = cell.getTagsLength(); - if (cell instanceof ByteBufferCell) { - ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(), - destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen); - } else { - ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(), - cell.getTagsOffset(), tlen); - } - return destinationOffset + tlen; - } - /********************* misc *************************************/ public static byte getRowByte(Cell cell, int index) { @@ -168,7 +108,7 @@ public class PrivateCellUtil { * @return A new cell which is having the extra tags also added to it. 
*/ public static Cell createCell(Cell cell, List tags) { - return createCell(cell, TagUtil.fromList(tags)); + return createCell(cell, Tag.fromList(tags)); } /** @@ -878,33 +818,92 @@ public class PrivateCellUtil { return t == Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode(); } - private static Iterator tagsIterator(final ByteBuffer tags, final int offset, - final int length) { - return new Iterator() { - private int pos = offset; - private int endOffset = offset + length - 1; + public static byte[] cloneTags(Cell cell) { + byte[] output = new byte[cell.getTagsLength()]; + copyTagsTo(cell, output, 0); + return output; + } - @Override - public boolean hasNext() { - return this.pos < endOffset; - } + /** + * Copies the tags info into the tag portion of the cell + * @param cell + * @param destination + * @param destinationOffset + * @return position after tags + */ + public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { + int tlen = cell.getTagsLength(); + if (cell instanceof ByteBufferCell) { + ByteBufferUtils.copyFromBufferToArray(destination, + ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(), + destinationOffset, tlen); + } else { + System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset, + tlen); + } + return destinationOffset + tlen; + } - @Override - public Tag next() { - if (hasNext()) { - int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); - Tag tag = new ByteBufferTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); - this.pos += Bytes.SIZEOF_SHORT + curTagLen; - return tag; - } - return null; - } + /** + * Copies the tags info into the tag portion of the cell + * @param cell + * @param destination + * @param destinationOffset + * @return the position after tags + */ + public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { + int tlen = cell.getTagsLength(); + if (cell instanceof ByteBufferCell) { + ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(), + destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen); + } else { + ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(), + cell.getTagsOffset(), tlen); + } + return destinationOffset + tlen; + } - @Override - public void remove() { - throw new UnsupportedOperationException(); + /** + * @param cell The Cell + * @return Tags in the given Cell as a List + */ + public static List getTags(Cell cell) { + List tags = new ArrayList<>(); + Iterator tagsItr = tagsIterator(cell); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } + + /** + * Retrieve Cell's first tag, matching the passed in type + * @param cell The Cell + * @param type Type of the Tag to retrieve + * @return null if there is no tag of the passed in tag type + */ + public static Tag getTag(Cell cell, byte type) { + boolean bufferBacked = cell instanceof ByteBufferCell; + int length = cell.getTagsLength(); + int offset = bufferBacked ? 
((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen; + if (bufferBacked) { + ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { + return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE); + } + } else { + tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE); + if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE); + } } - }; + pos += TAG_LENGTH_SIZE + tagLen; + } + return null; } /** @@ -953,47 +952,33 @@ public class PrivateCellUtil { }; } - /** - * @param cell The Cell - * @return Tags in the given Cell as a List - */ - public static List getTags(Cell cell) { - List tags = new ArrayList<>(); - Iterator tagsItr = tagsIterator(cell); - while (tagsItr.hasNext()) { - tags.add(tagsItr.next()); - } - return tags; - } + private static Iterator tagsIterator(final ByteBuffer tags, final int offset, + final int length) { + return new Iterator() { + private int pos = offset; + private int endOffset = offset + length - 1; - /** - * Retrieve Cell's first tag, matching the passed in type - * @param cell The Cell - * @param type Type of the Tag to retrieve - * @return null if there is no tag of the passed in tag type - */ - public static Tag getTag(Cell cell, byte type) { - boolean bufferBacked = cell instanceof ByteBufferCell; - int length = cell.getTagsLength(); - int offset = bufferBacked ? ((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset(); - int pos = offset; - while (pos < offset + length) { - int tagLen; - if (bufferBacked) { - ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer(); - tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); - if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { - return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE); - } - } else { - tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE); - if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { - return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE); + @Override + public boolean hasNext() { + return this.pos < endOffset; + } + + @Override + public Tag next() { + if (hasNext()) { + int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); + Tag tag = new ByteBufferTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); + this.pos += Bytes.SIZEOF_SHORT + curTagLen; + return tag; } + return null; } - pos += TAG_LENGTH_SIZE + tagLen; - } - return null; + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index 8a25898..f08dca1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -20,9 +20,12 @@ package org.apache.hadoop.hbase; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; /** @@ -33,7 +36,7 @@ import org.apache.hadoop.hbase.util.Bytes; *

* See {@link TagType} for reserved tag types. */ -@InterfaceAudience.Private +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) @InterfaceStability.Evolving public interface Tag { @@ -41,7 +44,6 @@ public interface Tag { public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT; public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE; public static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE; - /** * @return the tag type */ @@ -75,4 +77,178 @@ public interface Tag { * @return The {@link java.nio.ByteBuffer} containing the value bytes. */ ByteBuffer getValueByteBuffer(); + + /** + * Returns tag value in a new byte array. Primarily for use client-side. If server-side, use + * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and + * {@link Tag#getValueLength()} instead to save on allocations. + * @param tag The Tag whose value to be returned + * @return tag value in a new byte array. + */ + public static byte[] cloneValue(Tag tag) { + int tagLength = tag.getValueLength(); + byte[] tagArr = new byte[tagLength]; + if (tag.hasArray()) { + Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength); + } else { + ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(), + 0, tagLength); + } + return tagArr; + } + + /** + * Converts the value bytes of the given tag into a String value + * @param tag The Tag + * @return value as String + */ + public static String getValueAsString(Tag tag) { + if (tag.hasArray()) { + return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } + return Bytes.toString(cloneValue(tag)); + } + + /** + * Matches the value part of given tags + * @param t1 Tag to match the value + * @param t2 Tag to match the value + * @return True if values of both tags are same. + */ + public static boolean matchingValue(Tag t1, Tag t2) { + if (t1.hasArray() && t2.hasArray()) { + return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + if (t1.hasArray()) { + return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(), + t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength()); + } + if (t2.hasArray()) { + return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), + t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength()); + } + + /** + * Copies the tag's value bytes to the given byte array + * @param tag The Tag + * @param out The byte array where to copy the Tag value. + * @param offset The offset within 'out' array where to copy the Tag value. + */ + public static void copyValueTo(Tag tag, byte[] out, int offset) { + if (tag.hasArray()) { + Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } else { + ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(), + offset, tag.getValueLength()); + } + } + + /** + * Creates list of tags from given byte array, expected that it is in the expected tag format. 
+ * @param b The byte array + * @param offset The offset in array where tag bytes begin + * @param length Total length of all tags bytes + * @return List of tags + */ + public static List asList(byte[] b, int offset, int length) { + List tags = new ArrayList<>(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Creates list of tags from given Cell + * @param cell the cell to extract the tags + * @return List of tags + */ + public static List asList(Cell cell) { + if (cell instanceof ByteBufferCell) { + return asList(((ByteBufferCell) cell).getTagsByteBuffer(), + ((ByteBufferCell) cell).getTagsPosition(), cell.getTagsLength()); + } else { + return asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + } + } + + /** + * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format. + * @param b The ByteBuffer + * @param offset The offset in ByteBuffer where tag bytes begin + * @param length Total length of all tags bytes + * @return List of tags + */ + public static List asList(ByteBuffer b, int offset, int length) { + List tags = new ArrayList<>(); + int pos = offset; + while (pos < offset + length) { + int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new ByteBufferTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Write a list of tags into a byte array + * @param tags The list of tags + * @return the serialized tag data as bytes + */ + public static byte[] fromList(List tags) { + if (tags == null || tags.isEmpty()) { + return HConstants.EMPTY_BYTE_ARRAY; + } + int length = 0; + for (Tag tag : tags) { + length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; + } + byte[] b = new byte[length]; + int pos = 0; + int tlen; + for (Tag tag : tags) { + tlen = tag.getValueLength(); + pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(b, pos, tag.getType()); + if (tag.hasArray()) { + pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); + } else { + ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), + pos, tlen); + pos += tlen; + } + } + return b; + } + + /** + * Converts the value bytes of the given tag into a long value + * @param tag The Tag + * @return value as long + */ + public static long getValueAsLong(Tag tag) { + if (tag.hasArray()) { + return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } + return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset()); + } + + /** + * Converts the value bytes of the given tag into a byte value + * @param tag The Tag + * @return value as byte + */ + public static byte getValueAsByte(Tag tag) { + if (tag.hasArray()) { + return tag.getValueArray()[tag.getValueOffset()]; + } + return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset()); + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java index 71a2fbb..5a51f82 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java @@ -20,8 +20,13 @@ package org.apache.hadoop.hbase; import org.apache.yetus.audience.InterfaceAudience; import 
org.apache.yetus.audience.InterfaceStability; -@InterfaceAudience.Private +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) @InterfaceStability.Evolving +/** + * The byte representing the unique tag types. + * Note: CP users are suggested to use the higher byte values and to refrain from using the + * lower byte values for their tag types. + */ public final class TagType { // Please declare new Tag Types here to avoid step on pre-existing tag types. public static final byte ACL_TAG_TYPE = (byte) 1; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java index a4962f4..203e159 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -17,197 +17,23 @@ */ package org.apache.hadoop.hbase; -import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; - import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private -public final class TagUtil { +public class TagUtil { - // If you would like to check the length of tags, please call {@link TagUtil#checkForTagsLength()}. - private static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1; - - /** - * Private constructor to keep this class from being instantiated. - */ private TagUtil(){} /** - * Returns tag value in a new byte array. - * Primarily for use client-side. If server-side, use - * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} - * and {@link Tag#getValueLength()} instead to save on allocations. - * - * @param tag The Tag whose value to be returned - * @return tag value in a new byte array. - */ - public static byte[] cloneValue(Tag tag) { - int tagLength = tag.getValueLength(); - byte[] tagArr = new byte[tagLength]; - if (tag.hasArray()) { - Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength); - } else { - ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(), - 0, tagLength); - } - return tagArr; - } - - /** - * Creates list of tags from given byte array, expected that it is in the expected tag format. - * - * @param b The byte array - * @param offset The offset in array where tag bytes begin - * @param length Total length of all tags bytes - * @return List of tags - */ - public static List asList(byte[] b, int offset, int length) { - List tags = new ArrayList<>(); - int pos = offset; - while (pos < offset + length) { - int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); - tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE)); - pos += TAG_LENGTH_SIZE + tagLen; - } - return tags; - } - - /** - * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format. 
- * - * @param b The ByteBuffer - * @param offset The offset in ByteBuffer where tag bytes begin - * @param length Total length of all tags bytes - * @return List of tags - */ - public static List asList(ByteBuffer b, int offset, int length) { - List tags = new ArrayList<>(); - int pos = offset; - while (pos < offset + length) { - int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE); - tags.add(new ByteBufferTag(b, pos, tagLen + TAG_LENGTH_SIZE)); - pos += TAG_LENGTH_SIZE + tagLen; - } - return tags; - } - - /** - * Write a list of tags into a byte array - * - * @param tags The list of tags - * @return the serialized tag data as bytes - */ - public static byte[] fromList(List tags) { - if (tags == null || tags.isEmpty()) { - return HConstants.EMPTY_BYTE_ARRAY; - } - int length = 0; - for (Tag tag : tags) { - length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; - } - byte[] b = new byte[length]; - int pos = 0; - int tlen; - for (Tag tag : tags) { - tlen = tag.getValueLength(); - pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); - pos = Bytes.putByte(b, pos, tag.getType()); - if (tag.hasArray()) { - pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); - } else { - ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), - pos, tlen); - pos += tlen; - } - } - return b; - } - - /** - * Converts the value bytes of the given tag into a long value - * @param tag The Tag - * @return value as long - */ - public static long getValueAsLong(Tag tag) { - if (tag.hasArray()) { - return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } - return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset()); - } - - /** - * Converts the value bytes of the given tag into a byte value - * @param tag The Tag - * @return value as byte - */ - public static byte getValueAsByte(Tag tag) { - if (tag.hasArray()) { - return tag.getValueArray()[tag.getValueOffset()]; - } - return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset()); - } - - /** - * Converts the value bytes of the given tag into a String value - * @param tag The Tag - * @return value as String - */ - public static String getValueAsString(Tag tag){ - if(tag.hasArray()){ - return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } - return Bytes.toString(cloneValue(tag)); - } - - /** - * Matches the value part of given tags - * @param t1 Tag to match the value - * @param t2 Tag to match the value - * @return True if values of both tags are same. 
- */ - public static boolean matchingValue(Tag t1, Tag t2) { - if (t1.hasArray() && t2.hasArray()) { - return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), - t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); - } - if (t1.hasArray()) { - return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(), - t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength()); - } - if (t2.hasArray()) { - return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), - t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); - } - return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(), - t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength()); - } - - /** - * Copies the tag's value bytes to the given byte array - * @param tag The Tag - * @param out The byte array where to copy the Tag value. - * @param offset The offset within 'out' array where to copy the Tag value. - */ - public static void copyValueTo(Tag tag, byte[] out, int offset) { - if (tag.hasArray()) { - Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } else { - ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(), - offset, tag.getValueLength()); - } - } - - /** * Reads an int value stored as a VInt at tag's given offset. * @param tag The Tag * @param offset The offset where VInt bytes begin @@ -313,16 +139,4 @@ public final class TagUtil { throw new UnsupportedOperationException(); } }; - - /** - * Check the length of tags. If it is invalid, throw IllegalArgumentException - * - * @param tagsLength - * @throws IllegalArgumentException if tagslength is invalid - */ - public static void checkForTagsLength(int tagsLength) { - if (tagsLength > MAX_TAGS_LENGTH) { - throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + MAX_TAGS_LENGTH); - } - } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java index c5ce8de..5b1bde9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java @@ -158,16 +158,19 @@ public class TestByteBufferKeyValue { assertEquals(0L, offheapKV.getTimestamp()); assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); // change tags to handle both onheap and offheap stuff - List resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), - offheapKV.getTagsLength()); + List resTags = Tag.asList(offheapKV.getTagsArray(), + offheapKV.getTagsOffset(), offheapKV.getTagsLength()); Tag tag1 = resTags.get(0); assertEquals(t1.getType(), tag1.getType()); - assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1)); + assertEquals(Tag.getValueAsString(t1), + Tag.getValueAsString(tag1)); Tag tag2 = resTags.get(1); assertEquals(tag2.getType(), tag2.getType()); - assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); + assertEquals(Tag.getValueAsString(t2), + Tag.getValueAsString(tag2)); Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2); - assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); + assertEquals(Tag.getValueAsString(t2), + Tag.getValueAsString(tag2)); res = PrivateCellUtil.getTag(offheapKV, (byte) 3); assertNull(res); } diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 4ff4f05..45adc98 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -459,11 +459,11 @@ public class TestKeyValue extends TestCase { boolean meta1Ok = false, meta2Ok = false; for (Tag tag : tags) { if (tag.getType() == (byte) 1) { - if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) { + if (Bytes.equals(Tag.cloneValue(tag), metaValue1)) { meta1Ok = true; } } else { - if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) { + if (Bytes.equals(Tag.cloneValue(tag), metaValue2)) { meta2Ok = true; } } @@ -476,12 +476,12 @@ public class TestKeyValue extends TestCase { Tag next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue1); + Bytes.equals(Tag.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue2); + Bytes.equals(Tag.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); tagItr = PrivateCellUtil.tagsIterator(kv); @@ -489,12 +489,12 @@ public class TestKeyValue extends TestCase { next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue1); + Bytes.equals(Tag.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue2); + Bytes.equals(Tag.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java index d7894f4..f43a4e0 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java @@ -37,13 +37,13 @@ public class TestTagUtil { assertEquals(1, tags.size()); Tag ttlTag = tags.get(0); assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType()); - assertEquals(ttl, TagUtil.getValueAsLong(ttlTag)); + assertEquals(ttl, Tag.getValueAsLong(ttlTag)); // Already having a TTL tag in the list. 
So the call must remove the old tag long ttl2 = 30 * 1000; tags = TagUtil.carryForwardTTLTag(tags, ttl2); assertEquals(1, tags.size()); ttlTag = tags.get(0); assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType()); - assertEquals(ttl2, TagUtil.getValueAsLong(ttlTag)); + assertEquals(ttl2, Tag.getValueAsLong(ttlTag)); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index 1b7302f..c98f167 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -28,22 +28,20 @@ import java.io.DataOutputStream; import java.io.IOException; import java.util.List; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; - @Category({MiscTests.class, SmallTests.class}) public class TestCellCodecWithTags { @@ -79,36 +77,37 @@ public class TestCellCodecWithTags { assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, cell1)); - List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = + Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), Tag.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), Tag.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell2)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), Tag.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell3)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), 
Tag.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), Tag.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), Tag.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index badf048..19b18c9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -28,22 +28,20 @@ import java.io.DataOutputStream; import java.io.IOException; import java.util.List; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; - @Category({MiscTests.class, SmallTests.class}) public class TestKeyValueCodecWithTags { @@ -79,36 +77,37 @@ public class TestKeyValueCodecWithTags { assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, kv1)); - List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = + Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), Tag.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), Tag.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv2)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), Tag.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv3)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), 
c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), Tag.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), Tag.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), Tag.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java index 62a7306..31ad975 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -94,7 +93,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ReflectionUtils; -import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; @@ -492,7 +490,7 @@ public class TestCellBasedHFileOutputFormat2 { HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); Cell cell = scanner.getCell(); - List tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tagsFromCell = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); assertTrue(tagsFromCell.size() > 0); for (Tag tag : tagsFromCell) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java index f504702..87bd30a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java @@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -492,7 +491,7 @@ public class TestHFileOutputFormat2 { HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); Cell cell = scanner.getCell(); - List tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tagsFromCell = 
Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); assertTrue(tagsFromCell.size() > 0); for (Tag tag : tagsFromCell) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java index 05df33d..d3a1814 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java @@ -21,7 +21,9 @@ package org.apache.hadoop.hbase.coprocessor; import java.util.concurrent.ConcurrentMap; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Connection; @@ -85,4 +87,12 @@ public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); for (Tag tag : tags) { out.print(String.format(" T[%d]: %s", i++, tag.toString())); @@ -442,7 +441,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { System.err.println("ERROR, wrong value format in mob reference cell " + CellUtil.getCellKeyAsString(cell)); } else { - TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag)); + TableName tn = TableName.valueOf(Tag.cloneValue(tnTag)); String mobFileName = MobUtils.getMobFileName(cell); boolean exist = mobFileExists(fs, tn, mobFileName, Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index 8407783..1e7a694 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -101,7 +101,7 @@ public final class MobUtils { static { List tags = new ArrayList<>(); tags.add(MobConstants.MOB_REF_TAG); - REF_DELETE_MARKER_TAG_BYTES = TagUtil.fromList(tags); + REF_DELETE_MARKER_TAG_BYTES = Tag.fromList(tags); } /** @@ -496,7 +496,7 @@ public final class MobUtils { // find the original mob files by this table name. For details please see cloning // snapshot for mob files. 
tags.add(tableNameTag); - return createMobRefCell(cell, fileName, TagUtil.fromList(tags)); + return createMobRefCell(cell, fileName, Tag.fromList(tags)); } public static Cell createMobRefCell(Cell cell, byte[] fileName, byte[] refCellTags) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index 3064723..0cccfa3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -54,7 +54,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -84,14 +83,13 @@ import org.apache.hadoop.hbase.regionserver.StoreFileScanner; import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.hadoop.hbase.security.EncryptionUtil; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; - /** * An implementation of {@link MobCompactor} that compacts the mob files in partitions. 
*/ @@ -133,7 +131,7 @@ public class PartitionedMobCompactor extends MobCompactor { tags.add(MobConstants.MOB_REF_TAG); Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName()); tags.add(tableNameTag); - this.refCellTags = TagUtil.fromList(tags); + this.refCellTags = Tag.fromList(tags); cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index 5cb1e45..5db7383 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; @@ -121,7 +120,7 @@ public class HMobStore extends HStore { Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, getTableName().getName()); tags.add(tableNameTag); - this.refCellTags = TagUtil.fromList(tags); + this.refCellTags = Tag.fromList(tags); } /** @@ -332,7 +331,7 @@ public class HMobStore extends HStore { String fileName = MobUtils.getMobFileName(reference); Tag tableNameTag = MobUtils.getTableNameTag(reference); if (tableNameTag != null) { - String tableNameString = TagUtil.getValueAsString(tableNameTag); + String tableNameString = Tag.getValueAsString(tableNameTag); List locations = map.get(tableNameString); if (locations == null) { IdLock.Entry lockEntry = keyLock.getLockEntry(tableNameString.hashCode()); @@ -359,12 +358,15 @@ public class HMobStore extends HStore { + "qualifier,timestamp,type and tags but with an empty value to return."); result = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) .setRow(reference.getRowArray(), reference.getRowOffset(), reference.getRowLength()) - .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), reference.getFamilyLength()) - .setQualifier(reference.getQualifierArray(), reference.getQualifierOffset(), reference.getQualifierLength()) + .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), + reference.getFamilyLength()) + .setQualifier(reference.getQualifierArray(), + reference.getQualifierOffset(), reference.getQualifierLength()) .setTimestamp(reference.getTimestamp()) .setType(reference.getTypeByte()) .setValue(HConstants.EMPTY_BYTE_ARRAY) - .setTags(reference.getTagsArray(), reference.getTagsOffset(), reference.getTagsLength()) + .setTags(reference.getTagsArray(), reference.getTagsOffset(), + reference.getTagsLength()) .build(); } return result; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index d3cd087..266719a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -91,11 +91,11 @@ import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HDFSBlocksDistribution; -import 
org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.RegionTooBusyException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -149,6 +149,28 @@ import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.regionserver.wal.WALUtil; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId; +import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType; +import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotManifest; import org.apache.hadoop.hbase.util.Bytes; @@ -7714,7 +7736,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now)) .setType(KeyValue.Type.Put.getCode()) .setValue(newValue, 0, newValue.length) - .setTags(TagUtil.fromList(tags)) + .setTags(Tag.fromList(tags)) .build(); } else { PrivateCellUtil.updateLatestStamp(delta, now); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index 2c8d9a6..1e1714c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -18,9 +18,6 @@ package 
org.apache.hadoop.hbase.regionserver; -import com.google.protobuf.Message; -import com.google.protobuf.Service; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -38,8 +35,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; @@ -79,6 +79,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CoprocessorClassLoader; import org.apache.hadoop.hbase.util.Pair; @@ -86,7 +87,8 @@ import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import com.google.protobuf.Message; +import com.google.protobuf.Service; /** * Implements the coprocessor environment and runtime support for coprocessors @@ -177,6 +179,12 @@ public class RegionCoprocessorHost public MetricRegistry getMetricRegistryForRegionServer() { return metricRegistry; } + + @Override + public ExtendedCellBuilder getCellBuilder(CellBuilderType type) { + // do not allow seqId update + return ExtendedCellBuilderFactory.create(type, false); + } } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java index ae893cb..c164afd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java @@ -25,14 +25,12 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; @@ -43,6 +41,7 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityNewVersionBehaivorT import org.apache.hadoop.hbase.security.visibility.VisibilityScanDeleteTracker; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; /** * A query matcher that is specifically designed for the scan 
case. @@ -157,7 +156,7 @@ public abstract class ScanQueryMatcher implements ShipperListener { // to convert long ts = cell.getTimestamp(); assert t.getValueLength() == Bytes.SIZEOF_LONG; - long ttl = TagUtil.getValueAsLong(t); + long ttl = Tag.getValueAsLong(t); if (ts + ttl < now) { return true; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 4e67f6e..ecd937b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -18,13 +18,6 @@ package org.apache.hadoop.hbase.security.access; -import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; - import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.DataInputStream; @@ -45,12 +38,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; @@ -69,12 +62,16 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Maintains lists of permission grants to users and groups to allow for @@ -752,7 +749,7 @@ public class AccessControlLists { if (tag.hasArray()) { ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); } else { - ProtobufUtil.mergeFrom(builder, TagUtil.cloneValue(tag)); + ProtobufUtil.mergeFrom(builder, Tag.cloneValue(tag)); } ListMultimap kvPerms = AccessControlUtil.toUsersAndPermissions(builder.build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 6ee9f60..edd8243 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -18,11 +18,6 @@ */ package org.apache.hadoop.hbase.security.access; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; - import java.io.IOException; import java.net.InetAddress; import java.security.PrivilegedExceptionAction; @@ -52,10 +47,10 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -123,6 +118,13 @@ import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.access.Permission.Action; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; @@ -133,13 +135,10 @@ import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; +import com.google.protobuf.Message; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; /** * Provides basic authorization checks for data access and administrative diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java index e913b21..b1d5d9d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -51,12 +51,11 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import 
org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.util.StreamUtils; @@ -68,6 +67,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService { @@ -606,7 +606,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService for (Tag tag : deleteVisTags) { matchFound = false; for (Tag givenTag : putVisTags) { - if (TagUtil.matchingValue(tag, givenTag)) { + if (Tag.matchingValue(tag, givenTag)) { matchFound = true; break; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index cf6375b..af7e632 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -23,11 +23,6 @@ import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; - import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; @@ -49,12 +44,11 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.MetaTableAccessor; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; @@ -109,13 +103,17 @@ import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; /** * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in @@ -321,7 +319,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso Tag tag = pair.getSecond(); if (cellVisibility == null && tag != null) { // May need to store only the first one - cellVisibility = new CellVisibility(TagUtil.getValueAsString(tag)); + cellVisibility = new CellVisibility(Tag.getValueAsString(tag)); modifiedTagFound = true; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java index 3fb66b8..62a3b73 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.security.visibility; import static org.apache.hadoop.hbase.TagType.VISIBILITY_TAG_TYPE; -import com.google.protobuf.ByteString; - import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; @@ -39,11 +37,9 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter; @@ -67,6 +63,8 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import com.google.protobuf.ByteString; + /** * Utility method to support visibility */ @@ -217,7 +215,7 @@ public class VisibilityUtils { while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = TagUtil.getValueAsByte(tag); + serializationFormat = Tag.getValueAsByte(tag); } else if (tag.getType() == VISIBILITY_TAG_TYPE) { tags.add(tag); } @@ -244,7 +242,7 @@ public class VisibilityUtils { while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = TagUtil.getValueAsByte(tag); + serializationFormat = Tag.getValueAsByte(tag); } else if (tag.getType() == VISIBILITY_TAG_TYPE) { visTags.add(tag); } else { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java index 839ea31..113df0e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java @@ -27,7 +27,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.cli.CommandLine; import 
org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; @@ -43,12 +42,13 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +import com.fasterxml.jackson.databind.ObjectMapper; /** * WALPrettyPrinter prints the contents of a given WAL with a variety of @@ -343,7 +343,8 @@ public class WALPrettyPrinter { Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); - tagsString.add((tag.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(tag))); + tagsString + .add((tag.getType()) + ":" + Bytes.toStringBinary(Tag.cloneValue(tag))); } stringMap.put("tag", tagsString); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java index d0a3fd4..eae08c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java @@ -17,9 +17,13 @@ * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; + +import org.apache.yetus.audience.InterfaceAudience; + /** * Used in testcases only. */ +@InterfaceAudience.Private public enum TagUsage { // No tags would be added NO_TAG, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index e594629..b08c6ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -18,37 +18,35 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import org.junit.Before; -import org.junit.Test; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ByteBufferKeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Before; +import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -176,7 +174,7 @@ public class TestSeekTo { Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag next = tagsIterator.next(); - assertEquals("myTag1", Bytes.toString(TagUtil.cloneValue(next))); + assertEquals("myTag1", Bytes.toString(Tag.cloneValue(next))); } } assertTrue(scanner.seekBefore(toKV("k", tagUsage))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index 5de440d..52ae0ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java @@ -20,20 +20,22 @@ package org.apache.hadoop.hbase.protobuf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.nio.ByteBuffer; +import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; @@ -42,7 +44,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Col import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; -import org.apache.hadoop.hbase.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -333,7 +334,8 @@ public class TestProtobufUtil { dbb.put(arr); ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); - Cell newOffheapKV = ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); + Cell newOffheapKV = + ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java index 
3d3c79c..4a3d8d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileContext; @@ -90,7 +89,7 @@ public class TestStoreFileScannerWithTagCompression { kv.getRowLength())); List tags = KeyValueUtil.ensureKeyValue(kv).getTags(); assertEquals(1, tags.size()); - assertEquals("tag3", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0)))); } finally { s.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index 479d98f..1eab2ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -26,6 +26,7 @@ import java.util.List; import java.util.Optional; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -37,8 +38,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.CompactionState; @@ -56,10 +55,10 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; -import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WALEdit; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -406,7 +405,7 @@ public class TestTags { List tags = TestCoprocessorForTags.tags; assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(1, tags.size()); - assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -424,7 +423,7 @@ public class TestTags { // We cannot assume the ordering of tags List tagValues = new ArrayList<>(); for (Tag tag: tags) { - tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); + tagValues.add(Bytes.toString(Tag.cloneValue(tag))); } assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag2")); @@ -446,7 +445,7 @@ public class TestTags { tags = TestCoprocessorForTags.tags; 
assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(1, tags.size()); - assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -465,7 +464,7 @@ public class TestTags { kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); tags = TestCoprocessorForTags.tags; assertEquals(1, tags.size()); - assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -482,7 +481,7 @@ public class TestTags { // We cannot assume the ordering of tags tagValues.clear(); for (Tag tag: tags) { - tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); + tagValues.add(Bytes.toString(Tag.cloneValue(tag))); } assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag2")); @@ -503,7 +502,7 @@ public class TestTags { kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); tags = TestCoprocessorForTags.tags; assertEquals(1, tags.size()); - assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0)))); } finally { TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -613,7 +612,7 @@ public class TestTags { CellScanner cellScanner = result.cellScanner(); if (cellScanner.advance()) { Cell cell = cellScanner.current(); - tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java index 8a246be..1c5bb28 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java @@ -28,12 +28,11 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.codec.Codec.Decoder; import org.apache.hadoop.hbase.codec.Codec.Encoder; import org.apache.hadoop.hbase.io.util.LRUDictionary; @@ -84,7 +83,7 @@ public class TestWALCellCodecWithCompression { KeyValue kv = (KeyValue) decoder.current(); List tags = kv.getTags(); assertEquals(1, tags.size()); - assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tagValue1", Bytes.toString(Tag.cloneValue(tags.get(0)))); decoder.advance(); kv = (KeyValue) decoder.current(); tags = kv.getTags(); @@ -93,8 +92,8 @@ public class TestWALCellCodecWithCompression { kv = (KeyValue) decoder.current(); tags = kv.getTags(); assertEquals(2, tags.size()); - assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); - 
assertEquals("tagValue2", Bytes.toString(TagUtil.cloneValue(tags.get(1)))); + assertEquals("tagValue1", Bytes.toString(Tag.cloneValue(tags.get(0)))); + assertEquals("tagValue2", Bytes.toString(Tag.cloneValue(tags.get(1)))); } private KeyValue createKV(int noOfTags) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java index e2a393a..c3307e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java @@ -29,6 +29,7 @@ import java.util.Optional; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -40,8 +41,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -57,10 +56,10 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; -import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -252,7 +251,8 @@ public class TestReplicationWithTags { // Check tag presence in the 1st cell in 1st Result if (!results.isEmpty()) { Cell cell = results.get(0); - tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index f116b30..7c327aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -36,9 +36,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.CoordinatedStateManager; +import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.Server; @@ -329,6 +331,11 @@ public class TestTokenAuthentication { public 
Connection getConnection() { return null; } + + @Override + public ExtendedCellBuilder getCellBuilder(CellBuilderType type) { + return null; + } }); started = true; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index 93bbc42..be99254 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -35,16 +35,13 @@ import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; @@ -63,6 +60,7 @@ import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionN import org.apache.hadoop.hbase.security.visibility.expression.Operator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; /** * This is a VisibilityLabelService where labels in Mutation's visibility @@ -430,7 +428,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer for (Tag tag : deleteVisTags) { matchFound = false; for (Tag givenTag : putVisTags) { - if (TagUtil.matchingValue(tag, givenTag)) { + if (Tag.matchingValue(tag, givenTag)) { matchFound = true; break; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java index e5b1c39..41326e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java @@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -181,7 +180,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit boolean foundNonVisTag = false; for(Tag t : TestCoprocessorForTagsAtSink.tags) { if(t.getType() == NON_VIS_TAG_TYPE) { - assertEquals(TEMP, Bytes.toString(TagUtil.cloneValue(t))); + assertEquals(TEMP, Bytes.toString(Tag.cloneValue(t))); foundNonVisTag = true; break; } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java index 513c765..bb4df1c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java @@ -32,6 +32,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -40,14 +41,12 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; -import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.security.User; @@ -73,6 +71,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; +import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.junit.Assert; @@ -285,11 +284,11 @@ public class TestVisibilityLabelsReplication { for (Cell cell : cells) { if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0, row.length))) { - List tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); for (Tag tag : tags) { if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { - assertEquals(visTag, TagUtil.getValueAsString(tag)); + assertEquals(visTag, Tag.getValueAsString(tag)); tagFound = true; break; } @@ -331,7 +330,7 @@ public class TestVisibilityLabelsReplication { boolean foundNonVisTag = false; for (Tag t : TestCoprocessorForTagsAtSink.tags) { if (t.getType() == NON_VIS_TAG_TYPE) { - assertEquals(TEMP, TagUtil.getValueAsString(t)); + assertEquals(TEMP, Tag.getValueAsString(t)); foundNonVisTag = true; break; } @@ -443,7 +442,8 @@ public class TestVisibilityLabelsReplication { // Check tag presence in the 1st cell 
in 1st Result if (!results.isEmpty()) { Cell cell = results.get(0); - tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java index ada3ba5..6100078 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java @@ -31,13 +31,11 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -166,7 +164,7 @@ public class HFileTestUtil { fail(c.toString() + " has null tag"); continue; } - byte[] tval = TagUtil.cloneValue(t); + byte[] tval = Tag.cloneValue(t); assertArrayEquals(c.toString() + " has tag" + Bytes.toString(tval), r.getRow(), tval); } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 4e15a9c..3c39d6a 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -27,15 +27,14 @@ import java.util.List; import java.util.Map; import org.apache.commons.collections4.MapUtils; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.ServerName; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan.ReadType; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.security.visibility.CellVisibility; @@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.thrift2.generated.TScan; import org.apache.hadoop.hbase.thrift2.generated.TServerName; import org.apache.hadoop.hbase.thrift2.generated.TTimeRange; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class ThriftUtilities { @@ -172,7 +171,7 @@ public class 
ThriftUtilities { col.setTimestamp(kv.getTimestamp()); col.setValue(CellUtil.cloneValue(kv)); if (kv.getTagsLength() > 0) { - col.setTags(PrivateCellUtil.getTagsArray(kv)); + col.setTags(PrivateCellUtil.cloneTags(kv)); } columnValues.add(col); }
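---
Note (illustrative, not part of the patch): the hunks above consistently replace the removed TagUtil static helpers with the equivalent statics on the Tag interface (Tag.asList, Tag.cloneValue, Tag.fromList, Tag.getValueAsString, etc.) and swap PrivateCellUtil.getTagsArray for PrivateCellUtil.cloneTags. The patch also adds a RegionCoprocessorEnvironment#getCellBuilder(CellBuilderType) hook so coprocessors can obtain an ExtendedCellBuilder without touching ExtendedCellBuilderFactory directly. The following is a minimal usage sketch of the relocated Tag helpers, assuming the signatures shown in the hunks above; the class and method names here are hypothetical and exist only for illustration.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical helper class, for illustration only.
    public class TagMigrationSketch {

      // Reads every tag carried by a Cell using the statics that now live on Tag
      // instead of the removed TagUtil class.
      static void dumpTags(Cell cell) {
        if (cell.getTagsLength() == 0) {
          return;
        }
        List<Tag> tags =
            Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
        for (Tag t : tags) {
          // Tag.cloneValue copies the tag's value bytes; toStringBinary renders them safely.
          System.out.println(t.getType() + ":" + Bytes.toStringBinary(Tag.cloneValue(t)));
        }
      }

      // Serializes a tag list back into the single byte[] layout that the mob and
      // region code above pass to setTags(...), mirroring MobUtils/HRegion.
      static byte[] mobRefTagBytes(byte[] tableName) {
        List<Tag> tags = new ArrayList<>();
        tags.add(new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName));
        return Tag.fromList(tags);
      }
    }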