diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index dd2c2f0..9a5b65b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -33,11 +33,12 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.HeapSize; @@ -226,11 +227,12 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C c.getQualifierLength())); stringMap.put("timestamp", c.getTimestamp()); stringMap.put("vlen", c.getValueLength()); - List tags = PrivateCellUtil.getTags(c); + List tags = LimitedPrivateCellUtil.getTags(c); if (tags != null) { List tagsString = new ArrayList<>(tags.size()); for (Tag t : tags) { - tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t))); + tagsString + .add((t.getType()) + ":" + Bytes.toStringBinary(LimitedPrivateTagUtil.cloneValue(t))); } stringMap.put("tag", tagsString); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index a45629f..5480068 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; @@ -44,7 +45,8 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.TagCellBuilder; +import org.apache.hadoop.hbase.TagCellBuilderFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Consistency; @@ -494,7 +496,7 @@ public final class ProtobufUtil { throw new IllegalArgumentException("row cannot be null"); } // The proto has the metadata and the data itself - ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); + TagCellBuilder cellBuilder = TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (ColumnValue column: proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); for (QualifierValue qv: column.getQualifierValueList()) { @@ -523,7 +525,8 @@ public final class ProtobufUtil { .setTags(allTagsBytes) .build()); } else { - List tags = TagUtil.asList(allTagsBytes, 0, 
(short)allTagsBytes.length); + List tags = + LimitedPrivateTagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray)); } @@ -1642,6 +1645,17 @@ public final class ProtobufUtil { .build(); } + public static Cell toCell(TagCellBuilder cellBuilder, final CellProtos.Cell cell) { + return cellBuilder.clear() + .setRow(cell.getRow().toByteArray()) + .setFamily(cell.getFamily().toByteArray()) + .setQualifier(cell.getQualifier().toByteArray()) + .setTimestamp(cell.getTimestamp()) + .setType((byte) cell.getCellType().getNumber()) + .setValue(cell.getValue().toByteArray()) + .build(); + } + /** * Print out some subset of a MutationProto rather than all of it and its data * @param proto Protobuf to print out diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index c7450b4..675889e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -43,14 +43,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; -import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.ClusterStatus.Option; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; @@ -63,8 +62,8 @@ import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.TagCellBuilder; +import org.apache.hadoop.hbase.TagCellBuilderFactory; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.ClientUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -81,7 +80,6 @@ import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.client.RegionLoadStats; -import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.SnapshotDescription; @@ -188,6 +186,7 @@ import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.ipc.RemoteException; +import org.apache.yetus.audience.InterfaceAudience; /** * Protobufs utility. 
@@ -631,7 +630,7 @@ public final class ProtobufUtil { throw new IllegalArgumentException("row cannot be null"); } // The proto has the metadata and the data itself - ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); + TagCellBuilder cellBuilder = TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (ColumnValue column: proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); for (QualifierValue qv: column.getQualifierValueList()) { @@ -660,7 +659,8 @@ public final class ProtobufUtil { .setTags(allTagsBytes) .build()); } else { - List tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length); + List tags = + LimitedPrivateTagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray)); } @@ -812,7 +812,7 @@ public final class ProtobufUtil { if (qv.hasTags()) { tags = qv.getTags().toByteArray(); } - consumer.accept(mutation, ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + consumer.accept(mutation, TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) .setRow(mutation.getRow()) .setFamily(family) .setQualifier(qualifier) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index c2fb869..9bf8f56 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -134,7 +134,7 @@ public final class CellUtil { @Deprecated public static byte[] cloneTags(Cell cell) { byte[] output = new byte[cell.getTagsLength()]; - PrivateCellUtil.copyTagsTo(cell, output, 0); + LimitedPrivateCellUtil.copyTagsTo(cell, output, 0); return output; } @@ -149,7 +149,7 @@ public final class CellUtil { @Deprecated public static byte[] getTagArray(Cell cell) { byte[] output = new byte[cell.getTagsLength()]; - PrivateCellUtil.copyTagsTo(cell, output, 0); + LimitedPrivateCellUtil.copyTagsTo(cell, output, 0); return output; } @@ -593,7 +593,7 @@ public final class CellUtil { */ @Deprecated public static Cell createCell(Cell cell, List tags) { - return createCell(cell, TagUtil.fromList(tags)); + return createCell(cell, LimitedPrivateTagUtil.fromList(tags)); } /** @@ -1111,7 +1111,7 @@ public final class CellUtil { @Deprecated public static List getTags(Cell cell) { List tags = new ArrayList<>(); - Iterator tagsItr = PrivateCellUtil.tagsIterator(cell); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsItr.hasNext()) { tags.add(tagsItr.next()); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java index 41d204c..46b681e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java @@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience; * TODO: ditto for ByteBufferCell? 
*/ @InterfaceAudience.Private -public interface ExtendedCellBuilder extends CellBuilder { +public interface ExtendedCellBuilder extends TagCellBuilder { @Override ExtendedCellBuilder setRow(final byte[] row); @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java index c595e2c..3193f64 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java @@ -17,29 +17,12 @@ */ package org.apache.hadoop.hbase; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ArrayUtils; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private -public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { - protected byte[] row = null; - protected int rOffset = 0; - protected int rLength = 0; - protected byte[] family = null; - protected int fOffset = 0; - protected int fLength = 0; - protected byte[] qualifier = null; - protected int qOffset = 0; - protected int qLength = 0; - protected long timestamp = HConstants.LATEST_TIMESTAMP; - protected KeyValue.Type type = null; - protected byte[] value = null; - protected int vOffset = 0; - protected int vLength = 0; - protected long seqId = 0; - protected byte[] tags = null; - protected int tagsOffset = 0; - protected int tagsLength = 0; +public abstract class ExtendedCellBuilderImpl extends TagCellBuilderImpl + implements ExtendedCellBuilder { @Override public ExtendedCellBuilder setRow(final byte[] row) { @@ -166,15 +149,4 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { tagsLength = 0; return this; } - - private static KeyValue.Type toKeyValueType(DataType type) { - switch (type) { - case Put: return KeyValue.Type.Put; - case Delete: return KeyValue.Type.Delete; - case DeleteColumn: return KeyValue.Type.DeleteColumn; - case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion; - case DeleteFamily: return KeyValue.Type.DeleteFamily; - default: throw new UnsupportedOperationException("Unsupported data type:" + type); - } - } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java index a15843c..1e468e9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java @@ -94,7 +94,7 @@ public class IndividualBytesFieldCell implements ExtendedCell { } // Check tags - TagUtil.checkForTagsLength(tagsLength); + LimitedPrivateTagUtil.checkForTagsLength(tagsLength); checkArrayBounds(row, rOffset, rLength); checkArrayBounds(family, fOffset, fLength); checkArrayBounds(qualifier, qOffset, qLength); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 42ac97d..8e28ca1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -766,7 +766,7 @@ public class KeyValue implements ExtendedCell { if (qlength > Integer.MAX_VALUE - rlength - flength) { throw new IllegalArgumentException("Qualifier > " + Integer.MAX_VALUE); } - TagUtil.checkForTagsLength(tagsLength); + 
LimitedPrivateTagUtil.checkForTagsLength(tagsLength); // Key length long longkeylength = getKeyDataStructureSize(rlength, flength, qlength); if (longkeylength > Integer.MAX_VALUE) { @@ -884,7 +884,7 @@ public class KeyValue implements ExtendedCell { tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } - TagUtil.checkForTagsLength(tagsLength); + LimitedPrivateTagUtil.checkForTagsLength(tagsLength); int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); int keyValueLength = (int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, tagsLength); @@ -918,7 +918,7 @@ public class KeyValue implements ExtendedCell { int tlen = t.getValueLength(); pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE); pos = Bytes.putByte(buffer, pos, t.getType()); - TagUtil.copyValueTo(t, buffer, pos); + LimitedPrivateTagUtil.copyValueTo(t, buffer, pos); pos += tlen; } } @@ -951,7 +951,7 @@ public class KeyValue implements ExtendedCell { int vlength, byte[] tags, int tagsOffset, int tagsLength) { checkParameters(row, rlength, family, flength, qlength, vlength); - TagUtil.checkForTagsLength(tagsLength); + LimitedPrivateTagUtil.checkForTagsLength(tagsLength); // Allocate right-sized byte array. int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, @@ -1001,7 +1001,7 @@ public class KeyValue implements ExtendedCell { tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } - TagUtil.checkForTagsLength(tagsLength); + LimitedPrivateTagUtil.checkForTagsLength(tagsLength); // Allocate right-sized byte array. int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength); byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, @@ -1041,7 +1041,7 @@ public class KeyValue implements ExtendedCell { int tlen = t.getValueLength(); pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE); pos = Bytes.putByte(bytes, pos, t.getType()); - TagUtil.copyValueTo(t, bytes, pos); + LimitedPrivateTagUtil.copyValueTo(t, bytes, pos); pos += tlen; } } @@ -1530,7 +1530,7 @@ public class KeyValue implements ExtendedCell { if (tagsLength == 0) { return EMPTY_ARRAY_LIST; } - return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength); + return LimitedPrivateTagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 6fd37c0..e111101 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -163,7 +163,7 @@ public class KeyValueUtil { pos = CellUtil.copyValueTo(cell, output, pos); if (withTags && (cell.getTagsLength() > 0)) { pos = Bytes.putAsShort(output, pos, cell.getTagsLength()); - pos = PrivateCellUtil.copyTagsTo(cell, output, pos); + pos = LimitedPrivateCellUtil.copyTagsTo(cell, output, pos); } return pos; } @@ -179,7 +179,7 @@ public class KeyValueUtil { int tagsLength = cell.getTagsLength(); if (withTags && (tagsLength > 0)) { offset = ByteBufferUtils.putAsShort(buf, offset, tagsLength);// Tags length - offset = PrivateCellUtil.copyTagsTo(cell, buf, offset);// Tags bytes + offset = LimitedPrivateCellUtil.copyTagsTo(cell, buf, offset);// Tags bytes } return offset; } diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueWithTagsBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueWithTagsBuilder.java new file mode 100644 index 0000000..b6b6e7a --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueWithTagsBuilder.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) +@InterfaceStability.Evolving +public class KeyValueWithTagsBuilder extends TagCellBuilderImpl { + @Override + protected Cell innerBuild() { + KeyValue kv = new KeyValue(row, rOffset, rLength, family, fOffset, fLength, qualifier, qOffset, + qLength, timestamp, type, value, vOffset, vLength, tags, tagsOffset, tagsLength); + return kv; + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateCellUtil.java new file mode 100644 index 0000000..03d8879 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateCellUtil.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +/** + * Utility methods to work with Cells that can be used by CPs + */ +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) +@InterfaceStability.Evolving +public class LimitedPrivateCellUtil { + + private LimitedPrivateCellUtil() { + // prevent instantiation + } + + /** + * Returns tag value in a new byte array. If server-side, use {@link Tag#getValueArray()} with + * appropriate {@link Tag#getValueOffset()} and {@link Tag#getValueLength()} instead to save on + * allocations. + * @param cell + * @return tag value in a new byte array. + */ + public static byte[] getTagsArray(Cell cell) { + byte[] output = new byte[cell.getTagsLength()]; + copyTagsTo(cell, output, 0); + return output; + } + + public static byte[] cloneTags(Cell cell) { + byte[] output = new byte[cell.getTagsLength()]; + copyTagsTo(cell, output, 0); + return output; + } + + /** + * Copies the tags info into the tag portion of the cell + * @param cell + * @param destination + * @param destinationOffset + * @return position after tags + */ + public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { + int tlen = cell.getTagsLength(); + if (cell instanceof ByteBufferCell) { + ByteBufferUtils.copyFromBufferToArray(destination, + ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(), + destinationOffset, tlen); + } else { + System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset, + tlen); + } + return destinationOffset + tlen; + } + + /** + * Copies the tags info into the tag portion of the cell + * @param cell + * @param destination + * @param destinationOffset + * @return the position after tags + */ + public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { + int tlen = cell.getTagsLength(); + if (cell instanceof ByteBufferCell) { + ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(), + destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen); + } else { + ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(), + cell.getTagsOffset(), tlen); + } + return destinationOffset + tlen; + } + + /** + * @param cell The Cell + * @return Tags in the given Cell as a List + */ + public static List getTags(Cell cell) { + List tags = new ArrayList<>(); + Iterator tagsItr = tagsIterator(cell); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } + + /** + * Retrieve Cell's first tag, matching the passed in type + * @param cell The Cell + * @param type Type of the Tag to retrieve + * @return null if there is no tag of the passed in tag type + */ + public static Tag getTag(Cell cell, byte type) { + boolean bufferBacked = cell instanceof ByteBufferCell; + int length = cell.getTagsLength(); + int offset = bufferBacked ? 
((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen; + if (bufferBacked) { + ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { + return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE); + } + } else { + tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE); + if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE); + } + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return null; + } + + /** + * Util method to iterate through the tags in the given cell. + * @param cell The Cell over which tags iterator is needed. + * @return iterator for the tags + */ + public static Iterator tagsIterator(final Cell cell) { + final int tagsLength = cell.getTagsLength(); + // Save an object allocation where we can + if (tagsLength == 0) { + return TagUtil.EMPTY_TAGS_ITR; + } + if (cell instanceof ByteBufferCell) { + return tagsIterator(((ByteBufferCell) cell).getTagsByteBuffer(), + ((ByteBufferCell) cell).getTagsPosition(), tagsLength); + } + return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + } + + private static Iterator tagsIterator(final byte[] tags, final int offset, final int length) { + return new Iterator() { + private int pos = offset; + private int endOffset = offset + length - 1; + + @Override + public boolean hasNext() { + return this.pos < endOffset; + } + + @Override + public Tag next() { + if (hasNext()) { + int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); + Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE); + this.pos += Bytes.SIZEOF_SHORT + curTagLen; + return tag; + } + return null; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + private static Iterator tagsIterator(final ByteBuffer tags, final int offset, + final int length) { + return new Iterator() { + private int pos = offset; + private int endOffset = offset + length - 1; + + @Override + public boolean hasNext() { + return this.pos < endOffset; + } + + @Override + public Tag next() { + if (hasNext()) { + int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); + Tag tag = new ByteBufferTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); + this.pos += Bytes.SIZEOF_SHORT + curTagLen; + return tag; + } + return null; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateTagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateTagUtil.java new file mode 100644 index 0000000..e0b7326 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/LimitedPrivateTagUtil.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
+@InterfaceStability.Evolving
+public class LimitedPrivateTagUtil {
+
+  // If you would like to check the length of tags, please call
+  // {@link LimitedPrivateTagUtil#checkForTagsLength(int)}.
+  private static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1;
+
+  private LimitedPrivateTagUtil() {
+    // prevent instantiation
+  }
+
+  /**
+   * Check the length of tags. If it is invalid, throw IllegalArgumentException
+   * @param tagsLength the total serialized length of the tags
+   * @throws IllegalArgumentException if tagsLength is invalid
+   */
+  public static void checkForTagsLength(int tagsLength) {
+    if (tagsLength > MAX_TAGS_LENGTH) {
+      throw new IllegalArgumentException("tagslength " + tagsLength + " > " + MAX_TAGS_LENGTH);
+    }
+  }
+
+  /**
+   * Returns tag value in a new byte array. Primarily for use client-side. If server-side, use
+   * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and
+   * {@link Tag#getValueLength()} instead to save on allocations.
+   * @param tag The Tag whose value is to be returned
+   * @return tag value in a new byte array.
+   */
+  public static byte[] cloneValue(Tag tag) {
+    int tagLength = tag.getValueLength();
+    byte[] tagArr = new byte[tagLength];
+    if (tag.hasArray()) {
+      Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength);
+    } else {
+      ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(),
+        0, tagLength);
+    }
+    return tagArr;
+  }
+
+  /**
+   * Converts the value bytes of the given tag into a String value
+   * @param tag The Tag
+   * @return value as String
+   */
+  public static String getValueAsString(Tag tag) {
+    if (tag.hasArray()) {
+      return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+    }
+    return Bytes.toString(cloneValue(tag));
+  }
+
+  /**
+   * Matches the value part of given tags
+   * @param t1 Tag to match the value
+   * @param t2 Tag to match the value
+   * @return True if values of both tags are same.
+ */ + public static boolean matchingValue(Tag t1, Tag t2) { + if (t1.hasArray() && t2.hasArray()) { + return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + if (t1.hasArray()) { + return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(), + t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength()); + } + if (t2.hasArray()) { + return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), + t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength()); + } + + /** + * Copies the tag's value bytes to the given byte array + * @param tag The Tag + * @param out The byte array where to copy the Tag value. + * @param offset The offset within 'out' array where to copy the Tag value. + */ + public static void copyValueTo(Tag tag, byte[] out, int offset) { + if (tag.hasArray()) { + Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } else { + ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(), + offset, tag.getValueLength()); + } + } + + /** + * Creates list of tags from given byte array, expected that it is in the expected tag format. + * @param b The byte array + * @param offset The offset in array where tag bytes begin + * @param length Total length of all tags bytes + * @return List of tags + */ + public static List asList(byte[] b, int offset, int length) { + List tags = new ArrayList<>(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format. 
+ * @param b The ByteBuffer + * @param offset The offset in ByteBuffer where tag bytes begin + * @param length Total length of all tags bytes + * @return List of tags + */ + public static List asList(ByteBuffer b, int offset, int length) { + List tags = new ArrayList<>(); + int pos = offset; + while (pos < offset + length) { + int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new ByteBufferTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Write a list of tags into a byte array + * @param tags The list of tags + * @return the serialized tag data as bytes + */ + public static byte[] fromList(List tags) { + if (tags == null || tags.isEmpty()) { + return HConstants.EMPTY_BYTE_ARRAY; + } + int length = 0; + for (Tag tag : tags) { + length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; + } + byte[] b = new byte[length]; + int pos = 0; + int tlen; + for (Tag tag : tags) { + tlen = tag.getValueLength(); + pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(b, pos, tag.getType()); + if (tag.hasArray()) { + pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); + } else { + ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), + pos, tlen); + pos += tlen; + } + } + return b; + } + + /** + * Converts the value bytes of the given tag into a long value + * @param tag The Tag + * @return value as long + */ + public static long getValueAsLong(Tag tag) { + if (tag.hasArray()) { + return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } + return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset()); + } + + /** + * Converts the value bytes of the given tag into a byte value + * @param tag The Tag + * @return value as byte + */ + public static byte getValueAsByte(Tag tag) { + if (tag.hasArray()) { + return tag.getValueArray()[tag.getValueOffset()]; + } + return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset()); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index d70d974..0a8ae68 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -49,7 +49,6 @@ import com.google.common.annotations.VisibleForTesting; * rich set of APIs than those in {@link CellUtil} for internal usage. */ @InterfaceAudience.Private -// TODO : Make Tag IA.LimitedPrivate and move some of the Util methods to CP exposed Util class public class PrivateCellUtil { /** @@ -82,64 +81,6 @@ public class PrivateCellUtil { return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } - /** - * Returns tag value in a new byte array. If server-side, use {@link Tag#getValueArray()} with - * appropriate {@link Tag#getValueOffset()} and {@link Tag#getValueLength()} instead to save on - * allocations. - * @param cell - * @return tag value in a new byte array. 
- */ - public static byte[] getTagsArray(Cell cell) { - byte[] output = new byte[cell.getTagsLength()]; - copyTagsTo(cell, output, 0); - return output; - } - - public static byte[] cloneTags(Cell cell) { - byte[] output = new byte[cell.getTagsLength()]; - copyTagsTo(cell, output, 0); - return output; - } - - /** - * Copies the tags info into the tag portion of the cell - * @param cell - * @param destination - * @param destinationOffset - * @return position after tags - */ - public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { - int tlen = cell.getTagsLength(); - if (cell instanceof ByteBufferCell) { - ByteBufferUtils.copyFromBufferToArray(destination, - ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(), - destinationOffset, tlen); - } else { - System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset, - tlen); - } - return destinationOffset + tlen; - } - - /** - * Copies the tags info into the tag portion of the cell - * @param cell - * @param destination - * @param destinationOffset - * @return the position after tags - */ - public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { - int tlen = cell.getTagsLength(); - if (cell instanceof ByteBufferCell) { - ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(), - destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen); - } else { - ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(), - cell.getTagsOffset(), tlen); - } - return destinationOffset + tlen; - } - /********************* misc *************************************/ public static byte getRowByte(Cell cell, int index) { @@ -168,7 +109,7 @@ public class PrivateCellUtil { * @return A new cell which is having the extra tags also added to it. */ public static Cell createCell(Cell cell, List tags) { - return createCell(cell, TagUtil.fromList(tags)); + return createCell(cell, LimitedPrivateTagUtil.fromList(tags)); } /** @@ -878,124 +819,6 @@ public class PrivateCellUtil { return t == Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode(); } - private static Iterator tagsIterator(final ByteBuffer tags, final int offset, - final int length) { - return new Iterator() { - private int pos = offset; - private int endOffset = offset + length - 1; - - @Override - public boolean hasNext() { - return this.pos < endOffset; - } - - @Override - public Tag next() { - if (hasNext()) { - int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); - Tag tag = new ByteBufferTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); - this.pos += Bytes.SIZEOF_SHORT + curTagLen; - return tag; - } - return null; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - /** - * Util method to iterate through the tags in the given cell. - * @param cell The Cell over which tags iterator is needed. 
- * @return iterator for the tags - */ - public static Iterator tagsIterator(final Cell cell) { - final int tagsLength = cell.getTagsLength(); - // Save an object allocation where we can - if (tagsLength == 0) { - return TagUtil.EMPTY_TAGS_ITR; - } - if (cell instanceof ByteBufferCell) { - return tagsIterator(((ByteBufferCell) cell).getTagsByteBuffer(), - ((ByteBufferCell) cell).getTagsPosition(), tagsLength); - } - return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); - } - - private static Iterator tagsIterator(final byte[] tags, final int offset, final int length) { - return new Iterator() { - private int pos = offset; - private int endOffset = offset + length - 1; - - @Override - public boolean hasNext() { - return this.pos < endOffset; - } - - @Override - public Tag next() { - if (hasNext()) { - int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); - Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE); - this.pos += Bytes.SIZEOF_SHORT + curTagLen; - return tag; - } - return null; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - /** - * @param cell The Cell - * @return Tags in the given Cell as a List - */ - public static List getTags(Cell cell) { - List tags = new ArrayList<>(); - Iterator tagsItr = tagsIterator(cell); - while (tagsItr.hasNext()) { - tags.add(tagsItr.next()); - } - return tags; - } - - /** - * Retrieve Cell's first tag, matching the passed in type - * @param cell The Cell - * @param type Type of the Tag to retrieve - * @return null if there is no tag of the passed in tag type - */ - public static Tag getTag(Cell cell, byte type) { - boolean bufferBacked = cell instanceof ByteBufferCell; - int length = cell.getTagsLength(); - int offset = bufferBacked ? ((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset(); - int pos = offset; - while (pos < offset + length) { - int tagLen; - if (bufferBacked) { - ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer(); - tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); - if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { - return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE); - } - } else { - tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE); - if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { - return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE); - } - } - pos += TAG_LENGTH_SIZE + tagLen; - } - return null; - } - /** * Returns true if the first range start1...end1 overlaps with the second range start2...end2, * assuming the byte arrays represent row keys diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index 8a25898..55efe60 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.Bytes; *

* See {@link TagType} for reserved tag types. */ -@InterfaceAudience.Private +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) @InterfaceStability.Evolving public interface Tag { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilder.java new file mode 100644 index 0000000..a4dc158 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilder.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +/** + * For coprocessor's to build cells with tags. {@link Tag}s are not exposed publicly. + * Use {@link TagCellBuilderFactory} to get TagCellBuilder + * instance. + */ +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) +@InterfaceStability.Evolving +public interface TagCellBuilder extends CellBuilder { + @Override + TagCellBuilder setRow(final byte[] row); + @Override + TagCellBuilder setRow(final byte[] row, final int rOffset, final int rLength); + + @Override + TagCellBuilder setFamily(final byte[] family); + @Override + TagCellBuilder setFamily(final byte[] family, final int fOffset, final int fLength); + + @Override + TagCellBuilder setQualifier(final byte[] qualifier); + @Override + TagCellBuilder setQualifier(final byte[] qualifier, final int qOffset, final int qLength); + + @Override + TagCellBuilder setTimestamp(final long timestamp); + @Override + TagCellBuilder setType(final DataType type); + @Override + TagCellBuilder setValue(final byte[] value); + @Override + TagCellBuilder setValue(final byte[] value, final int vOffset, final int vLength); + + TagCellBuilder setTags(final byte[] tags); + + TagCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength); + + TagCellBuilder setType(final byte type); + + @Override + TagCellBuilder clear(); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderFactory.java new file mode 100644 index 0000000..f755515 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderFactory.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +/** + * Factory to build cells with Tags + */ +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) +@InterfaceStability.Evolving +public final class TagCellBuilderFactory { + public static TagCellBuilder create(CellBuilderType type) { + switch (type) { + case SHALLOW_COPY: + return new IndividualBytesFieldCellBuilder(); + case DEEP_COPY: + return new KeyValueWithTagsBuilder(); + default: + throw new UnsupportedOperationException("The type:" + type + " is unsupported"); + } + } + + private TagCellBuilderFactory(){ + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderImpl.java new file mode 100644 index 0000000..7999e8c --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagCellBuilderImpl.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.util.ArrayUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) +@InterfaceStability.Evolving +public abstract class TagCellBuilderImpl implements TagCellBuilder { + protected byte[] row = null; + protected int rOffset = 0; + protected int rLength = 0; + protected byte[] family = null; + protected int fOffset = 0; + protected int fLength = 0; + protected byte[] qualifier = null; + protected int qOffset = 0; + protected int qLength = 0; + protected long timestamp = HConstants.LATEST_TIMESTAMP; + protected KeyValue.Type type = null; + protected byte[] value = null; + protected int vOffset = 0; + protected int vLength = 0; + protected long seqId = 0; + protected byte[] tags = null; + protected int tagsOffset = 0; + protected int tagsLength = 0; + + @Override + public TagCellBuilder setRow(final byte[] row) { + return setRow(row, 0, ArrayUtils.length(row)); + } + + @Override + public TagCellBuilder setRow(final byte[] row, int rOffset, int rLength) { + this.row = row; + this.rOffset = rOffset; + this.rLength = rLength; + return this; + } + + @Override + public TagCellBuilder setFamily(final byte[] family) { + return setFamily(family, 0, ArrayUtils.length(family)); + } + + @Override + public TagCellBuilder setFamily(final byte[] family, int fOffset, int fLength) { + this.family = family; + this.fOffset = fOffset; + this.fLength = fLength; + return this; + } + + @Override + public TagCellBuilder setQualifier(final byte[] qualifier) { + return setQualifier(qualifier, 0, ArrayUtils.length(qualifier)); + } + + @Override + public TagCellBuilder setQualifier(final byte[] qualifier, int qOffset, int qLength) { + this.qualifier = qualifier; + this.qOffset = qOffset; + this.qLength = qLength; + return this; + } + + @Override + public TagCellBuilder setTimestamp(final long timestamp) { + this.timestamp = timestamp; + return this; + } + + @Override + public TagCellBuilder setType(final DataType type) { + this.type = toKeyValueType(type); + return this; + } + + @Override + public TagCellBuilder setType(final byte type) { + this.type = KeyValue.Type.codeToType(type); + return this; + } + + @Override + public TagCellBuilder setValue(final byte[] value) { + return setValue(value, 0, ArrayUtils.length(value)); + } + + @Override + public TagCellBuilder setValue(final byte[] value, int vOffset, int vLength) { + this.value = value; + this.vOffset = vOffset; + this.vLength = vLength; + return this; + } + + @Override + public TagCellBuilder setTags(final byte[] tags) { + return setTags(tags, 0, ArrayUtils.length(tags)); + } + + @Override + public TagCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength) { + this.tags = tags; + this.tagsOffset = tagsOffset; + this.tagsLength = tagsLength; + return this; + } + + private void checkBeforeBuild() { + if (type == null) { + throw new IllegalArgumentException("The type can't be NULL"); + } + } + + protected abstract Cell innerBuild(); + + @Override + public Cell build() { + checkBeforeBuild(); + return innerBuild(); + } + + @Override + public TagCellBuilder clear() { + row = null; + rOffset = 0; + rLength = 0; + family = null; + fOffset = 0; + fLength = 0; + qualifier = null; + qOffset = 0; + qLength = 0; + timestamp = HConstants.LATEST_TIMESTAMP; + type = null; + value = null; + vOffset = 0; + vLength = 0; + seqId = 0; + tags = null; + tagsOffset = 0; + 
tagsLength = 0; + return this; + } + + static KeyValue.Type toKeyValueType(DataType type) { + switch (type) { + case Put: return KeyValue.Type.Put; + case Delete: return KeyValue.Type.Delete; + case DeleteColumn: return KeyValue.Type.DeleteColumn; + case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion; + case DeleteFamily: return KeyValue.Type.DeleteFamily; + default: throw new UnsupportedOperationException("Unsupported data type:" + type); + } + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java index 71a2fbb..e4b519c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java @@ -20,8 +20,10 @@ package org.apache.hadoop.hbase; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -@InterfaceAudience.Private +@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) @InterfaceStability.Evolving +// Exposing this means if a CP tries to have its own tagtype then probably they should +// add a higher byte for that?? public final class TagType { // Please declare new Tag Types here to avoid step on pre-existing tag types. public static final byte ACL_TAG_TYPE = (byte) 1; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java index a4962f4..d75ef7c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -17,197 +17,25 @@ */ package org.apache.hadoop.hbase; -import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; - import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public final class TagUtil { - - // If you would like to check the length of tags, please call {@link TagUtil#checkForTagsLength()}. - private static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1; - /** * Private constructor to keep this class from being instantiated. */ private TagUtil(){} /** - * Returns tag value in a new byte array. - * Primarily for use client-side. If server-side, use - * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} - * and {@link Tag#getValueLength()} instead to save on allocations. - * - * @param tag The Tag whose value to be returned - * @return tag value in a new byte array. - */ - public static byte[] cloneValue(Tag tag) { - int tagLength = tag.getValueLength(); - byte[] tagArr = new byte[tagLength]; - if (tag.hasArray()) { - Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength); - } else { - ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(), - 0, tagLength); - } - return tagArr; - } - - /** - * Creates list of tags from given byte array, expected that it is in the expected tag format. 
- * - * @param b The byte array - * @param offset The offset in array where tag bytes begin - * @param length Total length of all tags bytes - * @return List of tags - */ - public static List asList(byte[] b, int offset, int length) { - List tags = new ArrayList<>(); - int pos = offset; - while (pos < offset + length) { - int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); - tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE)); - pos += TAG_LENGTH_SIZE + tagLen; - } - return tags; - } - - /** - * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format. - * - * @param b The ByteBuffer - * @param offset The offset in ByteBuffer where tag bytes begin - * @param length Total length of all tags bytes - * @return List of tags - */ - public static List asList(ByteBuffer b, int offset, int length) { - List tags = new ArrayList<>(); - int pos = offset; - while (pos < offset + length) { - int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE); - tags.add(new ByteBufferTag(b, pos, tagLen + TAG_LENGTH_SIZE)); - pos += TAG_LENGTH_SIZE + tagLen; - } - return tags; - } - - /** - * Write a list of tags into a byte array - * - * @param tags The list of tags - * @return the serialized tag data as bytes - */ - public static byte[] fromList(List tags) { - if (tags == null || tags.isEmpty()) { - return HConstants.EMPTY_BYTE_ARRAY; - } - int length = 0; - for (Tag tag : tags) { - length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; - } - byte[] b = new byte[length]; - int pos = 0; - int tlen; - for (Tag tag : tags) { - tlen = tag.getValueLength(); - pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); - pos = Bytes.putByte(b, pos, tag.getType()); - if (tag.hasArray()) { - pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); - } else { - ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), - pos, tlen); - pos += tlen; - } - } - return b; - } - - /** - * Converts the value bytes of the given tag into a long value - * @param tag The Tag - * @return value as long - */ - public static long getValueAsLong(Tag tag) { - if (tag.hasArray()) { - return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } - return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset()); - } - - /** - * Converts the value bytes of the given tag into a byte value - * @param tag The Tag - * @return value as byte - */ - public static byte getValueAsByte(Tag tag) { - if (tag.hasArray()) { - return tag.getValueArray()[tag.getValueOffset()]; - } - return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset()); - } - - /** - * Converts the value bytes of the given tag into a String value - * @param tag The Tag - * @return value as String - */ - public static String getValueAsString(Tag tag){ - if(tag.hasArray()){ - return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } - return Bytes.toString(cloneValue(tag)); - } - - /** - * Matches the value part of given tags - * @param t1 Tag to match the value - * @param t2 Tag to match the value - * @return True if values of both tags are same. 
- */ - public static boolean matchingValue(Tag t1, Tag t2) { - if (t1.hasArray() && t2.hasArray()) { - return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), - t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); - } - if (t1.hasArray()) { - return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(), - t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength()); - } - if (t2.hasArray()) { - return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), - t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); - } - return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(), - t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength()); - } - - /** - * Copies the tag's value bytes to the given byte array - * @param tag The Tag - * @param out The byte array where to copy the Tag value. - * @param offset The offset within 'out' array where to copy the Tag value. - */ - public static void copyValueTo(Tag tag, byte[] out, int offset) { - if (tag.hasArray()) { - Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); - } else { - ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(), - offset, tag.getValueLength()); - } - } - - /** * Reads an int value stored as a VInt at tag's given offset. * @param tag The Tag * @param offset The offset where VInt bytes begin @@ -232,7 +60,7 @@ public final class TagUtil { * Add to tagsOrNull any Tags cell is carrying or null if none. */ public static List carryForwardTags(final List tagsOrNull, final Cell cell) { - Iterator itr = PrivateCellUtil.tagsIterator(cell); + Iterator itr = LimitedPrivateCellUtil.tagsIterator(cell); if (itr == EMPTY_TAGS_ITR) { // If no Tags, return early. return tagsOrNull; @@ -313,16 +141,4 @@ public final class TagUtil { throw new UnsupportedOperationException(); } }; - - /** - * Check the length of tags. 
If it is invalid, throw IllegalArgumentException - * - * @param tagsLength - * @throws IllegalArgumentException if tagslength is invalid - */ - public static void checkForTagsLength(int tagsLength) { - if (tagsLength > MAX_TAGS_LENGTH) { - throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + MAX_TAGS_LENGTH); - } - } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java index c5ce8de..b4eb435 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestByteBufferKeyValue.java @@ -158,17 +158,20 @@ public class TestByteBufferKeyValue { assertEquals(0L, offheapKV.getTimestamp()); assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); // change tags to handle both onheap and offheap stuff - List resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), - offheapKV.getTagsLength()); + List resTags = LimitedPrivateTagUtil.asList(offheapKV.getTagsArray(), + offheapKV.getTagsOffset(), offheapKV.getTagsLength()); Tag tag1 = resTags.get(0); assertEquals(t1.getType(), tag1.getType()); - assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1)); + assertEquals(LimitedPrivateTagUtil.getValueAsString(t1), + LimitedPrivateTagUtil.getValueAsString(tag1)); Tag tag2 = resTags.get(1); assertEquals(tag2.getType(), tag2.getType()); - assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); - Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2); - assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); - res = PrivateCellUtil.getTag(offheapKV, (byte) 3); + assertEquals(LimitedPrivateTagUtil.getValueAsString(t2), + LimitedPrivateTagUtil.getValueAsString(tag2)); + Tag res = LimitedPrivateCellUtil.getTag(offheapKV, (byte) 2); + assertEquals(LimitedPrivateTagUtil.getValueAsString(t2), + LimitedPrivateTagUtil.getValueAsString(tag2)); + res = LimitedPrivateCellUtil.getTag(offheapKV, (byte) 3); assertNull(res); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 397476f..8b2d640 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -426,7 +426,7 @@ public class TestCellUtil { byte[] vDest = CellUtil.cloneValue(bbCell); assertTrue(Bytes.equals(v, vDest)); byte[] tDest = new byte[tags.length]; - PrivateCellUtil.copyTagsTo(bbCell, tDest, 0); + LimitedPrivateCellUtil.copyTagsTo(bbCell, tDest, 0); assertTrue(Bytes.equals(tags, tDest)); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 4ff4f05..e9a8d3e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -459,42 +459,42 @@ public class TestKeyValue extends TestCase { boolean meta1Ok = false, meta2Ok = false; for (Tag tag : tags) { if (tag.getType() == (byte) 1) { - if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) { + if (Bytes.equals(LimitedPrivateTagUtil.cloneValue(tag), metaValue1)) { meta1Ok = true; } } else { - if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) { + if (Bytes.equals(LimitedPrivateTagUtil.cloneValue(tag), 
metaValue2)) { meta2Ok = true; } } } assertTrue(meta1Ok); assertTrue(meta2Ok); - Iterator tagItr = PrivateCellUtil.tagsIterator(kv); + Iterator tagItr = LimitedPrivateCellUtil.tagsIterator(kv); //Iterator tagItr = kv.tagsIterator(); assertTrue(tagItr.hasNext()); Tag next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue1); + Bytes.equals(LimitedPrivateTagUtil.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue2); + Bytes.equals(LimitedPrivateTagUtil.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); - tagItr = PrivateCellUtil.tagsIterator(kv); + tagItr = LimitedPrivateCellUtil.tagsIterator(kv); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue1); + Bytes.equals(LimitedPrivateTagUtil.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(TagUtil.cloneValue(next), metaValue2); + Bytes.equals(LimitedPrivateTagUtil.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java index d7894f4..a34ca37 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java @@ -37,13 +37,13 @@ public class TestTagUtil { assertEquals(1, tags.size()); Tag ttlTag = tags.get(0); assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType()); - assertEquals(ttl, TagUtil.getValueAsLong(ttlTag)); + assertEquals(ttl, LimitedPrivateTagUtil.getValueAsLong(ttlTag)); // Already having a TTL tag in the list. 
So the call must remove the old tag long ttl2 = 30 * 1000; tags = TagUtil.carryForwardTTLTag(tags, ttl2); assertEquals(1, tags.size()); ttlTag = tags.get(0); assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType()); - assertEquals(ttl2, TagUtil.getValueAsLong(ttlTag)); + assertEquals(ttl2, LimitedPrivateTagUtil.getValueAsLong(ttlTag)); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index 1b7302f..a9b0704 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -28,22 +28,21 @@ import java.io.DataOutputStream; import java.io.IOException; import java.util.List; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; - @Category({MiscTests.class, SmallTests.class}) public class TestCellCodecWithTags { @@ -79,36 +78,37 @@ public class TestCellCodecWithTags { assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, cell1)); - List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = + LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), LimitedPrivateTagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell2)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), LimitedPrivateTagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell3)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = 
tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), LimitedPrivateTagUtil.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index badf048..beafa4a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -28,22 +28,21 @@ import java.io.DataOutputStream; import java.io.IOException; import java.util.List; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; -import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; - @Category({MiscTests.class, SmallTests.class}) public class TestKeyValueCodecWithTags { @@ -79,36 +78,37 @@ public class TestKeyValueCodecWithTags { assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, kv1)); - List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = + LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), LimitedPrivateTagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv2)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - 
assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), LimitedPrivateTagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv3)); - tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = LimitedPrivateTagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), LimitedPrivateTagUtil.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), LimitedPrivateTagUtil.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java index 62a7306..6535869 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -65,7 +66,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ReflectionUtils; -import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; @@ -492,7 +491,7 @@ public class TestCellBasedHFileOutputFormat2 { HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); Cell cell = scanner.getCell(); - List tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tagsFromCell = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); assertTrue(tagsFromCell.size() > 0); for (Tag tag : tagsFromCell) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java index f504702..cdea5c5 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -65,7 +66,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -492,7 +492,7 @@ public class TestHFileOutputFormat2 { HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); Cell cell = scanner.getCell(); - List tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tagsFromCell = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); assertTrue(tagsFromCell.size() > 0); for (Tag tag : tagsFromCell) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 1087465..3156e9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -55,22 +55,20 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.regionserver.HStoreFile; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.mob.MobUtils; +import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.TimeRangeTracker; import org.apache.hadoop.hbase.util.BloomFilter; import org.apache.hadoop.hbase.util.BloomFilterFactory; @@ -80,6 +78,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; import 
com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Counter; @@ -399,7 +399,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { + Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); int i = 0; - List tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tags = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); for (Tag tag : tags) { out.print(String.format(" T[%d]: %s", i++, tag.toString())); @@ -442,7 +442,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { System.err.println("ERROR, wrong value format in mob reference cell " + CellUtil.getCellKeyAsString(cell)); } else { - TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag)); + TableName tn = TableName.valueOf(LimitedPrivateTagUtil.cloneValue(tnTag)); String mobFileName = MobUtils.getMobFileName(cell); boolean exist = mobFileExists(fs, tn, mobFileName, Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index 1d9c10c..e0013e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -43,8 +43,9 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -102,7 +103,7 @@ public final class MobUtils { static { List tags = new ArrayList<>(); tags.add(MobConstants.MOB_REF_TAG); - REF_DELETE_MARKER_TAG_BYTES = TagUtil.fromList(tags); + REF_DELETE_MARKER_TAG_BYTES = LimitedPrivateTagUtil.fromList(tags); } /** @@ -176,7 +177,7 @@ public final class MobUtils { */ public static boolean isMobReferenceCell(Cell cell) { if (cell.getTagsLength() > 0) { - Tag tag = PrivateCellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE); + Tag tag = LimitedPrivateCellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE); return tag != null; } return false; @@ -189,7 +190,7 @@ public final class MobUtils { */ public static Tag getTableNameTag(Cell cell) { if (cell.getTagsLength() > 0) { - return PrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE); + return LimitedPrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE); } return null; } @@ -497,7 +498,7 @@ public final class MobUtils { // find the original mob files by this table name. For details please see cloning // snapshot for mob files. 
tags.add(tableNameTag); - return createMobRefCell(cell, fileName, TagUtil.fromList(tags)); + return createMobRefCell(cell, fileName, LimitedPrivateTagUtil.fromList(tags)); } public static Cell createMobRefCell(Cell cell, byte[] fileName, byte[] refCellTags) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index 92c7cef..f65e458 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -49,11 +49,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -83,14 +83,13 @@ import org.apache.hadoop.hbase.regionserver.StoreFileScanner; import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.hadoop.hbase.security.EncryptionUtil; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; - /** * An implementation of {@link MobCompactor} that compacts the mob files in partitions. 
*/ @@ -132,7 +131,7 @@ public class PartitionedMobCompactor extends MobCompactor { tags.add(MobConstants.MOB_REF_TAG); Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName()); tags.add(tableNameTag); - this.refCellTags = TagUtil.fromList(tags); + this.refCellTags = LimitedPrivateTagUtil.fromList(tags); cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index 206c3cd..bc09e5f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -36,13 +36,13 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagCellBuilderFactory; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; @@ -121,7 +121,7 @@ public class HMobStore extends HStore { Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, getTableName().getName()); tags.add(tableNameTag); - this.refCellTags = TagUtil.fromList(tags); + this.refCellTags = LimitedPrivateTagUtil.fromList(tags); } /** @@ -333,7 +333,7 @@ public class HMobStore extends HStore { String fileName = MobUtils.getMobFileName(reference); Tag tableNameTag = MobUtils.getTableNameTag(reference); if (tableNameTag != null) { - String tableNameString = TagUtil.getValueAsString(tableNameTag); + String tableNameString = LimitedPrivateTagUtil.getValueAsString(tableNameTag); List locations = map.get(tableNameString); if (locations == null) { IdLock.Entry lockEntry = keyLock.getLockEntry(tableNameString.hashCode()); @@ -358,7 +358,7 @@ public class HMobStore extends HStore { if (result == null) { LOG.warn("The Cell result is null, assemble a new Cell with the same row,family," + "qualifier,timestamp,type and tags but with an empty value to return."); - result = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) + result = TagCellBuilderFactory.create(CellBuilderType.DEEP_COPY) .setRow(reference.getRowArray(), reference.getRowOffset(), reference.getRowLength()) .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), reference.getFamilyLength()) .setQualifier(reference.getQualifierArray(), reference.getQualifierOffset(), reference.getQualifierLength()) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index e8f62fe..8ca7a6f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -85,6 +85,7 @@ import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import 
org.apache.hadoop.hbase.CompoundConfiguration; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; @@ -99,6 +100,7 @@ import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionTooBusyException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagCellBuilderFactory; import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.client.Append; @@ -7556,7 +7558,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (currentCell != null) { tags = TagUtil.carryForwardTags(tags, currentCell); byte[] newValue = supplier.apply(currentCell); - return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + return TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) .setRow(mutation.getRow(), 0, mutation.getRow().length) .setFamily(columnFamily, 0, columnFamily.length) // copy the qualifier if the cell is located in shared memory. @@ -7564,7 +7566,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now)) .setType(KeyValue.Type.Put.getCode()) .setValue(newValue, 0, newValue.length) - .setTags(TagUtil.fromList(tags)) + .setTags(LimitedPrivateTagUtil.fromList(tags)) .build(); } else { PrivateCellUtil.updateLatestStamp(delta, now); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java index c636333..4a8b542 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java @@ -24,15 +24,15 @@ import java.util.NavigableSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityNewVersionBehaivorT import org.apache.hadoop.hbase.security.visibility.VisibilityScanDeleteTracker; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.yetus.audience.InterfaceAudience; /** * A query matcher that is specifically designed for the scan case. @@ -149,7 +150,7 @@ public abstract class ScanQueryMatcher implements ShipperListener { // Look for a TTL tag first. Use it instead of the family setting if // found. 
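The HRegion hunk above switches cell construction from ExtendedCellBuilderFactory to the new TagCellBuilderFactory. A hypothetical usage sketch follows, assuming TagCellBuilder exposes the same fluent setters seen in the HRegion and HMobStore hunks (setRow, setFamily, setQualifier, setTimestamp, setType, setValue, setTags); it is illustrative only and not code from this patch:

    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagCellBuilderFactory;
    import org.apache.hadoop.hbase.TagType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TagCellBuilderSketch {
      // Builds a Put-type cell carrying a single TTL tag (value in milliseconds).
      static Cell taggedPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, long ttlMs) {
        List<Tag> tags = Collections.<Tag>singletonList(
            new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttlMs)));
        return TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
            .setRow(row, 0, row.length)
            .setFamily(family, 0, family.length)
            .setQualifier(qualifier, 0, qualifier.length)
            .setTimestamp(System.currentTimeMillis())
            .setType(KeyValue.Type.Put.getCode())
            .setValue(value, 0, value.length)
            .setTags(LimitedPrivateTagUtil.fromList(tags)) // serialized tag block
            .build();
      }
    }
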
If a cell has multiple TTLs, resolve the conflict by using the // first tag encountered. - Iterator i = PrivateCellUtil.tagsIterator(cell); + Iterator i = LimitedPrivateCellUtil.tagsIterator(cell); while (i.hasNext()) { Tag t = i.next(); if (TagType.TTL_TAG_TYPE == t.getType()) { @@ -157,7 +158,7 @@ public abstract class ScanQueryMatcher implements ShipperListener { // to convert long ts = cell.getTimestamp(); assert t.getValueLength() == Bytes.SIZEOF_LONG; - long ttl = TagUtil.getValueAsLong(t); + long ttl = LimitedPrivateTagUtil.getValueAsLong(t); if (ts + ttl < now) { return true; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 4e67f6e..52d1931 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -18,13 +18,6 @@ package org.apache.hadoop.hbase.security.access; -import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; - import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.DataInputStream; @@ -45,12 +38,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; @@ -69,12 +63,16 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; +import org.apache.yetus.audience.InterfaceAudience; /** * Maintains lists of permission grants to users and groups to allow for @@ -740,7 +738,7 @@ public class AccessControlLists { return null; } List results = Lists.newArrayList(); - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == ACL_TAG_TYPE) { @@ -752,7 +750,7 @@ public class 
AccessControlLists { if (tag.hasArray()) { ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); } else { - ProtobufUtil.mergeFrom(builder, TagUtil.cloneValue(tag)); + ProtobufUtil.mergeFrom(builder, LimitedPrivateTagUtil.cloneValue(tag)); } ListMultimap kvPerms = AccessControlUtil.toUsersAndPermissions(builder.build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 0a4f22c..dfcd3ca 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -18,11 +18,6 @@ */ package org.apache.hadoop.hbase.security.access; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; - import java.io.IOException; import java.net.InetAddress; import java.security.PrivilegedExceptionAction; @@ -52,10 +47,11 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -123,6 +119,13 @@ import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.access.Permission.Action; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; @@ -133,13 +136,10 @@ import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; +import com.google.protobuf.Message; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; /** * 
Provides basic authorization checks for data access and administrative @@ -893,7 +893,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, // Prepend the supplied perms in a new ACL tag to an update list of tags for the cell List tags = new ArrayList<>(); tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms)); - Iterator tagIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagIterator.hasNext()) { tags.add(tagIterator.next()); } @@ -922,7 +922,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, return; } for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { - Iterator tagsItr = PrivateCellUtil.tagsIterator(cellScanner.current()); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(cellScanner.current()); while (tagsItr.hasNext()) { if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) { throw new AccessDeniedException("Mutation contains cell with reserved type tag"); @@ -2060,7 +2060,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, List aclTags = Lists.newArrayList(); ListMultimap perms = ArrayListMultimap.create(); if (oldCell != null) { - Iterator tagIterator = PrivateCellUtil.tagsIterator(oldCell); + Iterator tagIterator = LimitedPrivateCellUtil.tagsIterator(oldCell); while (tagIterator.hasNext()) { Tag tag = tagIterator.next(); if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java index e913b21..f6cdf32 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -46,7 +46,9 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; @@ -492,7 +494,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService @Override public boolean evaluate(Cell cell) throws IOException { boolean visibilityTagPresent = false; - Iterator tagsItr = PrivateCellUtil.tagsIterator(cell); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsItr.hasNext()) { boolean includeKV = true; Tag tag = tagsItr.next(); @@ -606,7 +608,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService for (Tag tag : deleteVisTags) { matchFound = false; for (Tag givenTag : putVisTags) { - if (TagUtil.matchingValue(tag, givenTag)) { + if (LimitedPrivateTagUtil.matchingValue(tag, givenTag)) { matchFound = true; break; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 84edf37..0721e93 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -23,11 +23,6 @@ import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; - import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; @@ -50,12 +45,13 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.PrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.MetaTableAccessor; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; @@ -110,13 +106,17 @@ import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; /** * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in @@ -341,7 +341,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso Tag tag = pair.getSecond(); if (cellVisibility == null && tag != null) { // May need to store only the first one - cellVisibility = new CellVisibility(TagUtil.getValueAsString(tag)); + cellVisibility = new CellVisibility(LimitedPrivateTagUtil.getValueAsString(tag)); modifiedTagFound = true; } } @@ -368,7 +368,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso List updatedCells = new ArrayList<>(); for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { Cell cell = cellScanner.current(); - List tags = PrivateCellUtil.getTags(cell); + List tags = LimitedPrivateCellUtil.getTags(cell); if (modifiedTagFound) { // Rewrite the tags by removing the modified tags. 
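The VisibilityController changes in this file all funnel tag access through LimitedPrivateCellUtil.tagsIterator. As an illustration only (not code from this patch), a coprocessor-style helper that pulls the first string-encoded visibility expression off a cell could look like the sketch below, assuming the Tag and TagType constants used in the surrounding hunks:

    import java.util.Iterator;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.LimitedPrivateCellUtil;
    import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;

    public class TagScanSketch {
      // Returns the first string-encoded visibility expression on the cell, or null if none.
      static String firstStringVisExpression(Cell cell) {
        Iterator<Tag> it = LimitedPrivateCellUtil.tagsIterator(cell);
        while (it.hasNext()) {
          Tag tag = it.next();
          if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
            return LimitedPrivateTagUtil.getValueAsString(tag);
          }
        }
        return null;
      }
    }
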
removeReplicationVisibilityTag(tags); @@ -471,7 +471,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso // cell visiblilty tags // have been modified Tag modifiedTag = null; - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { @@ -483,7 +483,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso pair.setSecond(modifiedTag); return pair; } - Iterator tagsItr = PrivateCellUtil.tagsIterator(cell); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsItr.hasNext()) { if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { return pair; @@ -513,7 +513,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso if (isSystemOrSuperUser()) { return true; } - Iterator tagsItr = PrivateCellUtil.tagsIterator(cell); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsItr.hasNext()) { if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { return false; @@ -730,7 +730,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso tags.addAll(this.visibilityLabelService.createVisibilityExpTags(cellVisibility.getExpression(), true, authCheck)); // Carry forward all other tags - Iterator tagsItr = PrivateCellUtil.tagsIterator(newCell); + Iterator tagsItr = LimitedPrivateCellUtil.tagsIterator(newCell); while (tagsItr.hasNext()) { Tag tag = tagsItr.next(); if (tag.getType() != TagType.VISIBILITY_TAG_TYPE diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java index 3fb66b8..0947e30 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.security.visibility; import static org.apache.hadoop.hbase.TagType.VISIBILITY_TAG_TYPE; -import com.google.protobuf.ByteString; - import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; @@ -39,11 +37,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.PrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; -import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter; @@ -67,6 +64,8 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import com.google.protobuf.ByteString; + /** * Utility method to support visibility */ @@ -213,11 +212,11 @@ public class VisibilityUtils { */ public static Byte extractVisibilityTags(Cell cell, List tags) { Byte serializationFormat = null; - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); 
+ Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = TagUtil.getValueAsByte(tag); + serializationFormat = LimitedPrivateTagUtil.getValueAsByte(tag); } else if (tag.getType() == VISIBILITY_TAG_TYPE) { tags.add(tag); } @@ -240,11 +239,11 @@ public class VisibilityUtils { public static Byte extractAndPartitionTags(Cell cell, List visTags, List nonVisTags) { Byte serializationFormat = null; - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = TagUtil.getValueAsByte(tag); + serializationFormat = LimitedPrivateTagUtil.getValueAsByte(tag); } else if (tag.getType() == VISIBILITY_TAG_TYPE) { visTags.add(tag); } else { @@ -256,7 +255,7 @@ public class VisibilityUtils { } public static boolean isVisibilityTagsPresent(Cell cell) { - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == VISIBILITY_TAG_TYPE) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java index 839ea31..f0a40bf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java @@ -27,7 +27,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; @@ -41,14 +40,16 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; -import org.apache.hadoop.hbase.PrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; + +import com.fasterxml.jackson.databind.ObjectMapper; /** * WALPrettyPrinter prints the contents of a given WAL with a variety of @@ -340,10 +341,11 @@ public class WALPrettyPrinter { stringMap.put("vlen", cell.getValueLength()); if (cell.getTagsLength() > 0) { List tagsString = new ArrayList<>(); - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); - tagsString.add((tag.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(tag))); + tagsString + .add((tag.getType()) + ":" + Bytes.toStringBinary(LimitedPrivateTagUtil.cloneValue(tag))); } 
stringMap.put("tag", tagsString); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java index d0a3fd4..eae08c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java @@ -17,9 +17,13 @@ * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; + +import org.apache.yetus.audience.InterfaceAudience; + /** * Used in testcases only. */ +@InterfaceAudience.Private public enum TagUsage { // No tags would be added NO_TAG, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index 91894dc..a1cea2f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -18,37 +18,36 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import org.junit.Before; -import org.junit.Test; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.ArrayBackedTag; +import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.ByteBufferKeyValue; +import org.apache.hadoop.hbase.LimitedPrivateCellUtil; +import org.apache.hadoop.hbase.LimitedPrivateTagUtil; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.TagUtil; -import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Before; +import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -173,10 +172,10 @@ public class TestSeekTo { assertEquals("i", toRowStr(scanner.getCell())); Cell cell = scanner.getCell(); if (tagUsage != TagUsage.NO_TAG && cell.getTagsLength() > 0) { - Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); + Iterator tagsIterator = LimitedPrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag next = tagsIterator.next(); - assertEquals("myTag1", Bytes.toString(TagUtil.cloneValue(next))); + assertEquals("myTag1", Bytes.toString(LimitedPrivateTagUtil.cloneValue(next))); } } assertTrue(scanner.seekBefore(toKV("k", tagUsage))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index 
5de440d..49af254 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
@@ -20,20 +20,22 @@ package org.apache.hadoop.hbase.protobuf;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
+import org.apache.hadoop.hbase.TagCellBuilderFactory;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
@@ -42,7 +44,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Col
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
-import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -333,7 +334,8 @@ public class TestProtobufUtil {
     dbb.put(arr);
     ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength());
     CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV);
-    Cell newOffheapKV = ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
+    Cell newOffheapKV =
+        ProtobufUtil.toCell(TagCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
     assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
   }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index 3d3c79c..2e67b19 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -30,11 +30,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -90,7 +90,7 @@ public class TestStoreFileScannerWithTagCompression {
       kv.getRowLength()));
       List<Tag> tags = KeyValueUtil.ensureKeyValue(kv).getTags();
       assertEquals(1, tags.size());
-      assertEquals("tag3", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+      assertEquals("tag3", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
     } finally {
       s.close();
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 8e3c372..ce08417 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -26,9 +26,11 @@ import java.util.List;
 import java.util.Optional;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -37,8 +39,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.CompactionState;
@@ -56,10 +56,10 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -407,7 +407,7 @@ public class TestTags {
       List<Tag> tags = TestCoprocessorForTags.tags;
       assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
       assertEquals(1, tags.size());
-      assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+      assertEquals("tag1", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
       TestCoprocessorForTags.checkTagPresence = false;
       TestCoprocessorForTags.tags = null;
@@ -425,7 +425,7 @@ public class TestTags {
       // We cannot assume the ordering of tags
       List<String> tagValues = new ArrayList<>();
       for (Tag tag: tags) {
-        tagValues.add(Bytes.toString(TagUtil.cloneValue(tag)));
+        tagValues.add(Bytes.toString(LimitedPrivateTagUtil.cloneValue(tag)));
       }
       assertTrue(tagValues.contains("tag1"));
       assertTrue(tagValues.contains("tag2"));
@@ -447,7 +447,7 @@ public class TestTags {
       tags = TestCoprocessorForTags.tags;
       assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
       assertEquals(1, tags.size());
-      assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+      assertEquals("tag2", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
       TestCoprocessorForTags.checkTagPresence = false;
       TestCoprocessorForTags.tags = null;
@@ -466,7 +466,7 @@ public class TestTags {
       kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(1, tags.size());
-      assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+      assertEquals("tag1", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
       TestCoprocessorForTags.checkTagPresence = false;
       TestCoprocessorForTags.tags = null;
@@ -483,7 +483,7 @@ public class TestTags {
       // We cannot assume the ordering of tags
       tagValues.clear();
       for (Tag tag: tags) {
-        tagValues.add(Bytes.toString(TagUtil.cloneValue(tag)));
+        tagValues.add(Bytes.toString(LimitedPrivateTagUtil.cloneValue(tag)));
       }
       assertTrue(tagValues.contains("tag1"));
       assertTrue(tagValues.contains("tag2"));
@@ -504,7 +504,7 @@ public class TestTags {
       kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(1, tags.size());
-      assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+      assertEquals("tag2", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
     } finally {
       TestCoprocessorForTags.checkTagPresence = false;
       TestCoprocessorForTags.tags = null;
@@ -614,7 +614,7 @@ public class TestTags {
         CellScanner cellScanner = result.cellScanner();
         if (cellScanner.advance()) {
           Cell cell = cellScanner.current();
-          tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
+          tags = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
             cell.getTagsLength());
         }
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
index 8a246be..e2899e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.codec.Codec.Decoder;
 import org.apache.hadoop.hbase.codec.Codec.Encoder;
@@ -84,7 +84,7 @@ public class TestWALCellCodecWithCompression {
     KeyValue kv = (KeyValue) decoder.current();
     List<Tag> tags = kv.getTags();
     assertEquals(1, tags.size());
-    assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
+    assertEquals("tagValue1", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
     decoder.advance();
     kv = (KeyValue) decoder.current();
     tags = kv.getTags();
@@ -93,8 +93,8 @@ public class TestWALCellCodecWithCompression {
     kv = (KeyValue) decoder.current();
     tags = kv.getTags();
     assertEquals(2, tags.size());
-    assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
-    assertEquals("tagValue2", Bytes.toString(TagUtil.cloneValue(tags.get(1))));
+    assertEquals("tagValue1", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(0))));
+    assertEquals("tagValue2", Bytes.toString(LimitedPrivateTagUtil.cloneValue(tags.get(1))));
   }
 
   private KeyValue createKV(int noOfTags) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index e2a393a..22affa2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -29,8 +29,10 @@ import java.util.Optional;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -40,8 +42,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -57,10 +57,10 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -252,7 +252,8 @@ public class TestReplicationWithTags {
         // Check tag presence in the 1st cell in 1st Result
         if (!results.isEmpty()) {
           Cell cell = results.get(0);
-          tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+          tags = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
+            cell.getTagsLength());
         }
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
index 93bbc42..7c561b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
@@ -35,16 +35,14 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateCellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
@@ -63,6 +61,7 @@ import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionN
 import org.apache.hadoop.hbase.security.visibility.expression.Operator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a VisibilityLabelService where labels in Mutation's visibility
@@ -285,7 +284,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
       boolean visibilityTagPresent = false;
       // Save an object allocation where we can
       if (cell.getTagsLength() > 0) {
-        Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(cell);
+        Iterator<Tag> tagsItr = LimitedPrivateCellUtil.tagsIterator(cell);
         while (tagsItr.hasNext()) {
           boolean includeKV = true;
           Tag tag = tagsItr.next();
@@ -430,7 +429,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
     for (Tag tag : deleteVisTags) {
       matchFound = false;
       for (Tag givenTag : putVisTags) {
-        if (TagUtil.matchingValue(tag, givenTag)) {
+        if (LimitedPrivateTagUtil.matchingValue(tag, givenTag)) {
           matchFound = true;
           break;
         }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
index 398be48..23649c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
@@ -31,13 +31,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -182,7 +182,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
       boolean foundNonVisTag = false;
       for(Tag t : TestCoprocessorForTagsAtSink.tags) {
         if(t.getType() == NON_VIS_TAG_TYPE) {
-          assertEquals(TEMP, Bytes.toString(TagUtil.cloneValue(t)));
+          assertEquals(TEMP, Bytes.toString(LimitedPrivateTagUtil.cloneValue(t)));
           foundNonVisTag = true;
           break;
         }
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index 99525e2..43a9dbd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -32,22 +32,22 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
-import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.security.User;
@@ -73,6 +72,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.junit.Assert;
@@ -286,11 +286,11 @@ public class TestVisibilityLabelsReplication {
     for (Cell cell : cells) {
       if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0,
           row.length))) {
-        List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
+        List<Tag> tags = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
           cell.getTagsLength());
         for (Tag tag : tags) {
           if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
-            assertEquals(visTag, TagUtil.getValueAsString(tag));
+            assertEquals(visTag, LimitedPrivateTagUtil.getValueAsString(tag));
             tagFound = true;
             break;
           }
@@ -332,7 +332,7 @@ public class TestVisibilityLabelsReplication {
       boolean foundNonVisTag = false;
       for (Tag t : TestCoprocessorForTagsAtSink.tags) {
         if (t.getType() == NON_VIS_TAG_TYPE) {
-          assertEquals(TEMP, TagUtil.getValueAsString(t));
+          assertEquals(TEMP, LimitedPrivateTagUtil.getValueAsString(t));
           foundNonVisTag = true;
           break;
         }
@@ -444,7 +444,8 @@ public class TestVisibilityLabelsReplication {
         // Check tag presence in the 1st cell in 1st Result
        if (!results.isEmpty()) {
           Cell cell = results.get(0);
-          tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+          tags = LimitedPrivateTagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
+            cell.getTagsLength());
         }
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index ada3ba5..b311cc6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -31,13 +31,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.LimitedPrivateCellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateTagUtil;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -137,7 +136,7 @@ public class HFileTestUtil {
         kv = MobUtils.createMobRefCell(kv, key, tableNameTag);
         // verify that the kv has the tag.
-        Tag t = PrivateCellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE);
+        Tag t = LimitedPrivateCellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE);
         if (t == null) {
           throw new IllegalStateException("Tag didn't stick to KV " + kv.toString());
         }
@@ -161,12 +160,12 @@ public class HFileTestUtil {
     ResultScanner s = table.getScanner(new Scan());
     for (Result r : s) {
       for (Cell c : r.listCells()) {
-        Tag t = PrivateCellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE);
+        Tag t = LimitedPrivateCellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE);
         if (t == null) {
           fail(c.toString() + " has null tag");
           continue;
         }
-        byte[] tval = TagUtil.cloneValue(t);
+        byte[] tval = LimitedPrivateTagUtil.cloneValue(t);
         assertArrayEquals(c.toString() + " has tag" + Bytes.toString(tval),
           r.getRow(), tval);
       }
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 4e15a9c..67af1d7 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -27,15 +27,14 @@ import java.util.List;
 import java.util.Map;
 import org.apache.commons.collections4.MapUtils;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.LimitedPrivateCellUtil;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Scan.ReadType;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
@@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.thrift2.generated.TScan;
 import org.apache.hadoop.hbase.thrift2.generated.TServerName;
 import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public class ThriftUtilities {
@@ -172,7 +171,7 @@ public class ThriftUtilities {
       col.setTimestamp(kv.getTimestamp());
       col.setValue(CellUtil.cloneValue(kv));
       if (kv.getTagsLength() > 0) {
-        col.setTags(PrivateCellUtil.getTagsArray(kv));
+        col.setTags(LimitedPrivateCellUtil.getTagsArray(kv));
       }
       columnValues.add(col);
     }