diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index 665c59c..d25e3be 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -124,7 +125,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C * @param qualifier * @param ts * @param value - * @param tags - Specify the Tags as an Array {@link KeyValue.Tag} + * @param tags - Specify the Tags as an Array * @return a KeyValue with this objects row key and the Put identifier. */ KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) { @@ -138,7 +139,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C * @return a KeyValue with this objects row key and the Put identifier. */ KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value, - Tag[] tags) { + Tag[] tags) { return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length, family, 0, family == null ? 0 : family.length, qualifier, ts, KeyValue.Type.Put, value, tags != null ? 
Arrays.asList(tags) : null); @@ -219,11 +220,11 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C c.getQualifierLength())); stringMap.put("timestamp", c.getTimestamp()); stringMap.put("vlen", c.getValueLength()); - List tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); if (tags != null) { List tagsString = new ArrayList(); for (Tag t : tags) { - tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(t.getValue())); + tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t))); } stringMap.put("tag", tagsString); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index b2d5994..7d4c366 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -53,6 +53,8 @@ import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Consistency; @@ -583,17 +585,14 @@ public final class ProtobufUtil { byte[] tags; if (qv.hasTags()) { tags = qv.getTags().toByteArray(); - Object[] array = Tag.asList(tags, 0, (short)tags.length).toArray(); - Tag[] tagArray = new Tag[array.length]; - for(int i = 0; i< array.length; i++) { - tagArray[i] = (Tag)array[i]; - } if(qv.hasDeleteType()) { byte[] qual = qv.hasQualifier() ? 
qv.getQualifier().toByteArray() : null; put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, fromDeleteType(qv.getDeleteType()), null, tags)); } else { - put.addImmutable(family, qualifier, ts, value, tagArray); + List t = TagUtil.asList(tags, 0, (short)tags.length); + Tag[] tagArray = new Tag[t.size()]; + put.addImmutable(family, qualifier, ts, value, t.toArray(tagArray)); } } else { if(qv.hasDeleteType()) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java new file mode 100644 index 0000000..71eb54c --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java @@ -0,0 +1,137 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import java.nio.ByteBuffer; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; +/** + * Tags are part of cells and helps to add metadata about the KVs. + * Metadata could be ACLs per cells, visibility labels, etc. 
+ */ +@InterfaceAudience.Private +@InterfaceStability.Evolving +public class ArrayBackedTag implements Tag { + private final byte type;// TODO extra type state needed? + private final byte[] bytes; + private int offset = 0; + private int length = 0; + + /** + * The special tag will write the length of each tag and that will be + * followed by the type and then the actual tag. + * So every time the length part is parsed we need to add + 1 byte to it to + * get the type and then get the actual tag. + */ + public ArrayBackedTag(byte tagType, String tag) { + this(tagType, Bytes.toBytes(tag)); + } + + /** + * Format for a tag : + * {@code } tag length is serialized + * using 2 bytes only but as this will be unsigned, we can have max tag length of + * (Short.MAX_SIZE * 2) +1. It includes 1 byte type length and actual tag bytes length. + */ + public ArrayBackedTag(byte tagType, byte[] tag) { + int tagLength = tag.length + TYPE_LENGTH_SIZE; + if (tagLength > MAX_TAG_LENGTH) { + throw new IllegalArgumentException( + "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH); + } + length = TAG_LENGTH_SIZE + tagLength; + bytes = new byte[length]; + int pos = Bytes.putAsShort(bytes, 0, tagLength); + pos = Bytes.putByte(bytes, pos, tagType); + Bytes.putBytes(bytes, pos, tag, 0, tag.length); + this.type = tagType; + } + + /** + * Creates a Tag from the specified byte array and offset. Presumes + * bytes content starting at offset is formatted as + * a Tag blob. + * The bytes to include the tag type, tag length and actual tag bytes. + * @param offset offset to start of Tag + */ + public ArrayBackedTag(byte[] bytes, int offset) { + this(bytes, offset, getLength(bytes, offset)); + } + + private static int getLength(byte[] bytes, int offset) { + return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE); + } + + /** + * Creates a Tag from the specified byte array, starting at offset, and for length + * length. 
Presumes bytes content starting at offset is + * formatted as a Tag blob. + */ + public ArrayBackedTag(byte[] bytes, int offset, int length) { + if (length > MAX_TAG_LENGTH) { + throw new IllegalArgumentException( + "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH); + } + this.bytes = bytes; + this.offset = offset; + this.length = length; + this.type = bytes[offset + TAG_LENGTH_SIZE]; + } + + /** + * @return The byte array backing this Tag. + */ + public byte[] getValueArray() { + return this.bytes; + } + + /** + * @return the tag type + */ + public byte getType() { + return this.type; + } + + /** + * @return Length of actual tag bytes within the backed buffer + */ + public int getValueLength() { + return this.length - INFRASTRUCTURE_SIZE; + } + + /** + * @return Offset of actual tag bytes within the backed buffer + */ + public int getValueOffset() { + return this.offset + INFRASTRUCTURE_SIZE; + } + + @Override + public boolean hasArray() { + return true; + } + + @Override + public ByteBuffer getValueByteBuffer() { + return ByteBuffer.wrap(bytes); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 0d34137..0b03134 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -19,11 +19,13 @@ package org.apache.hadoop.hbase; import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY; +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; import java.io.DataOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; @@ -108,8 +110,8 @@ public final class CellUtil { /** * Returns tag value in a new byte array. 
If server-side, use - * {@link Tag#getBuffer()} with appropriate {@link Tag#getTagOffset()} and - * {@link Tag#getTagLength()} instead to save on allocations. + * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and + * {@link Tag#getValueLength()} instead to save on allocations. * @param cell * @return tag value in a new byte array. */ @@ -749,6 +751,8 @@ public final class CellUtil { * @param offset * @param length * @return iterator for the tags + * @deprecated As of 2.0.0 and will be removed in 3.0.0 + * Instead use {@link #tagsIterator(Cell)} */ public static Iterator tagsIterator(final byte[] tags, final int offset, final int length) { return new Iterator() { @@ -764,7 +768,7 @@ public final class CellUtil { public Tag next() { if (hasNext()) { int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); - Tag tag = new Tag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); + Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE); this.pos += Bytes.SIZEOF_SHORT + curTagLen; return tag; } @@ -778,6 +782,114 @@ public final class CellUtil { }; } + private static Iterator tagsIterator(final ByteBuffer tags, final int offset, final int length) { + return new Iterator() { + private int pos = offset; + private int endOffset = offset + length - 1; + + @Override + public boolean hasNext() { + return this.pos < endOffset; + } + + @Override + public Tag next() { + if (hasNext()) { + int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE); + Tag tag = new OffheapTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE); + this.pos += Bytes.SIZEOF_SHORT + curTagLen; + return tag; + } + return null; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + private static final Iterator EMPTY_TAGS_ITR = new Iterator() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public Tag next() { + return null; + } + + @Override + public void remove() { 
+ throw new UnsupportedOperationException(); + } + }; + + /** + * Util method to iterate through the tags in the given cell. + * + * @param cell + * @return iterator for the tags + */ + public static Iterator tagsIterator(final Cell cell) { + final int tagsLength = cell.getTagsLength(); + // Save an object allocation where we can + if (tagsLength == 0) { + return EMPTY_TAGS_ITR; + } + if (cell instanceof ByteBufferedCell) { + return tagsIterator(((ByteBufferedCell) cell).getTagsByteBuffer(), + ((ByteBufferedCell) cell).getTagsPosition(), tagsLength); + } + return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), tagsLength); + } + + /** + * @param cell + * @return Tags in the given Cell as a List + */ + public static List getTags(Cell cell) { + List tags = new ArrayList(); + Iterator tagsItr = tagsIterator(cell); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } + + /** + * Retrieve Cell's first tag, matching the passed in type + * + * @param cell + * @param type + * @return null if there is no tag of the passed in tag type + */ + public static Tag getTag(Cell cell, byte type){ + boolean bufferBacked = cell instanceof ByteBufferedCell; + int length = cell.getTagsLength(); + int offset = bufferBacked? 
((ByteBufferedCell)cell).getTagsPosition():cell.getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen; + if (bufferBacked) { + ByteBuffer tagsBuffer = ((ByteBufferedCell)cell).getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { + return new OffheapTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE); + } + } else { + tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE); + if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE); + } + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return null; + } + /** * Returns true if the first range start1...end1 overlaps with the second range * start2...end2, assuming the byte arrays represent row keys diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 933dd1d..a30a24c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -894,7 +894,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, int tagsLength = 0; if (tags != null && tags.length > 0) { for (Tag t: tags) { - tagsLength += t.getLength(); + tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } checkForTagsLength(tagsLength); @@ -928,7 +928,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, if (tagsLength > 0) { pos = Bytes.putAsShort(buffer, pos, tagsLength); for (Tag t : tags) { - pos = Bytes.putBytes(buffer, pos, t.getBuffer(), t.getOffset(), t.getLength()); + int tlen = t.getValueLength(); + pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(buffer, pos, t.getType()); + TagUtil.copyValueTo(t, buffer, pos); + pos += tlen; } } return 
keyValueLength; @@ -1013,7 +1017,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, int tagsLength = 0; if (tags != null && !tags.isEmpty()) { for (Tag t : tags) { - tagsLength += t.getLength(); + tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE; } } checkForTagsLength(tagsLength); @@ -1053,7 +1057,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, if (tagsLength > 0) { pos = Bytes.putAsShort(bytes, pos, tagsLength); for (Tag t : tags) { - pos = Bytes.putBytes(bytes, pos, t.getBuffer(), t.getOffset(), t.getLength()); + int tlen = t.getValueLength(); + pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(bytes, pos, t.getType()); + TagUtil.copyValueTo(t, bytes, pos); + pos += tlen; } } return bytes; @@ -1176,7 +1184,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, if (tags != null) { List tagsString = new ArrayList(); for (Tag t : tags) { - tagsString.add((t.getType()) + ":" +Bytes.toStringBinary(t.getValue())); + tagsString.add((t.getType()) + ":" + TagUtil.getValueAsString(t)); } stringMap.put("tag", tagsString); } @@ -1558,7 +1566,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, if (tagsLength == 0) { return EMPTY_ARRAY_LIST; } - return Tag.asList(getTagsArray(), getTagsOffset(), tagsLength); + return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength); } /** @@ -2386,7 +2394,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, public static KeyValue cloneAndAddTags(Cell c, List newTags) { List existingTags = null; if(c.getTagsLength() > 0) { - existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + existingTags = CellUtil.getTags(c); existingTags.addAll(newTags); } else { existingTags = newTags; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java new file mode 100644 index 0000000..dee7197 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java @@ -0,0 +1,72 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import java.nio.ByteBuffer; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.ByteBufferUtils; + +@InterfaceAudience.Private +@InterfaceStability.Evolving +public class OffheapTag implements Tag { + + private ByteBuffer buffer; + private int offset, length; + private byte type; + + public OffheapTag(ByteBuffer buffer, int offset, int length) { + this.buffer = buffer; + this.offset = offset; + this.length = length; + this.type = ByteBufferUtils.toByte(buffer, offset + TAG_LENGTH_SIZE); + } + + @Override + public byte getType() { + return this.type; + } + + @Override + public int getValueOffset() { + return this.offset + INFRASTRUCTURE_SIZE; + } + + @Override + public int getValueLength() { + return this.length - INFRASTRUCTURE_SIZE; + } + + @Override + public boolean hasArray() { + return false; + } + + @Override + public byte[] getValueArray() { + throw new UnsupportedOperationException(); + } + + @Override + public ByteBuffer getValueByteBuffer() { + return this.buffer; + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index 36b87b1..0deb833 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -19,201 +19,56 @@ */ package org.apache.hadoop.hbase; -import java.util.ArrayList; -import java.util.List; +import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; + /** - * Tags are part of cells and helps to add metadata about the KVs. + * Tags are part of cells and helps to add metadata about them. * Metadata could be ACLs per cells, visibility labels, etc. 
*/ @InterfaceAudience.Private @InterfaceStability.Evolving -public class Tag { +public interface Tag { + public final static int TYPE_LENGTH_SIZE = Bytes.SIZEOF_BYTE; public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT; public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE; public static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE; - private final byte type; - private final byte[] bytes; - private int offset = 0; - private int length = 0; - - /** - * The special tag will write the length of each tag and that will be - * followed by the type and then the actual tag. - * So every time the length part is parsed we need to add + 1 byte to it to - * get the type and then get the actual tag. - */ - public Tag(byte tagType, String tag) { - this(tagType, Bytes.toBytes(tag)); - } - - /** - * Format for a tag : - * {@code } tag length is serialized - * using 2 bytes only but as this will be unsigned, we can have max tag length of - * (Short.MAX_SIZE * 2) +1. It includes 1 byte type length and actual tag bytes length. - */ - public Tag(byte tagType, byte[] tag) { - int tagLength = tag.length + TYPE_LENGTH_SIZE; - if (tagLength > MAX_TAG_LENGTH) { - throw new IllegalArgumentException( - "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH); - } - length = TAG_LENGTH_SIZE + tagLength; - bytes = new byte[length]; - int pos = Bytes.putAsShort(bytes, 0, tagLength); - pos = Bytes.putByte(bytes, pos, tagType); - Bytes.putBytes(bytes, pos, tag, 0, tag.length); - this.type = tagType; - } - - /** - * Creates a Tag from the specified byte array and offset. Presumes - * bytes content starting at offset is formatted as - * a Tag blob. - * The bytes to include the tag type, tag length and actual tag bytes. 
- * @param offset offset to start of Tag - */ - public Tag(byte[] bytes, int offset) { - this(bytes, offset, getLength(bytes, offset)); - } - - private static int getLength(byte[] bytes, int offset) { - return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE); - } - - /** - * Creates a Tag from the specified byte array, starting at offset, and for length - * length. Presumes bytes content starting at offset is - * formatted as a Tag blob. - */ - public Tag(byte[] bytes, int offset, int length) { - if (length > MAX_TAG_LENGTH) { - throw new IllegalArgumentException( - "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH); - } - this.bytes = bytes; - this.offset = offset; - this.length = length; - this.type = bytes[offset + TAG_LENGTH_SIZE]; - } - - /** - * @return The byte array backing this Tag. - */ - public byte[] getBuffer() { - return this.bytes; - } - /** * @return the tag type */ - public byte getType() { - return this.type; - } - - /** - * @return Length of actual tag bytes within the backed buffer - */ - public int getTagLength() { - return this.length - INFRASTRUCTURE_SIZE; - } - - /** - * @return Offset of actual tag bytes within the backed buffer - */ - public int getTagOffset() { - return this.offset + INFRASTRUCTURE_SIZE; - } - - /** - * Returns tag value in a new byte array. - * Primarily for use client-side. If server-side, use - * {@link #getBuffer()} with appropriate {@link #getTagOffset()} and {@link #getTagLength()} - * instead to save on allocations. - * @return tag value in a new byte array. - */ - public byte[] getValue() { - int tagLength = getTagLength(); - byte[] tag = new byte[tagLength]; - Bytes.putBytes(tag, 0, bytes, getTagOffset(), tagLength); - return tag; - } + byte getType(); /** - * Creates the list of tags from the byte array b. 
Expected that b is in the - * expected tag format - * @param b - * @param offset - * @param length - * @return List of tags + * @return Offset of tag value within the backed buffer */ - public static List asList(byte[] b, int offset, int length) { - List tags = new ArrayList(); - int pos = offset; - while (pos < offset + length) { - int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); - tags.add(new Tag(b, pos, tagLen + TAG_LENGTH_SIZE)); - pos += TAG_LENGTH_SIZE + tagLen; - } - return tags; - } + int getValueOffset(); /** - * Write a list of tags into a byte array - * @param tags - * @return the serialized tag data as bytes + * @return Length of tag value within the backed buffer */ - public static byte[] fromList(List tags) { - int length = 0; - for (Tag tag: tags) { - length += tag.length; - } - byte[] b = new byte[length]; - int pos = 0; - for (Tag tag: tags) { - System.arraycopy(tag.bytes, tag.offset, b, pos, tag.length); - pos += tag.length; - } - return b; - } + int getValueLength(); /** - * Retrieve the first tag from the tags byte array matching the passed in tag type - * @param b - * @param offset - * @param length - * @param type - * @return null if there is no tag of the passed in tag type + * Tells whether or not this Tag is backed by a byte array. + * @return true when this Tag is backed by byte array */ - public static Tag getTag(byte[] b, int offset, int length, byte type) { - int pos = offset; - while (pos < offset + length) { - int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); - if(b[pos + TAG_LENGTH_SIZE] == type) { - return new Tag(b, pos, tagLen + TAG_LENGTH_SIZE); - } - pos += TAG_LENGTH_SIZE + tagLen; - } - return null; - } + boolean hasArray(); /** - * Returns the total length of the entire tag entity + * @return The array containing the value bytes. + * @throws UnsupportedOperationException + * when {@link #hasArray()} return false. 
Use {@link #getValueByteBuffer()} in such + * situation */ - int getLength() { - return this.length; - } + byte[] getValueArray(); /** - * Returns the offset of the entire tag entity + * @return The {@link java.nio.ByteBuffer} containing the value bytes. */ - int getOffset() { - return this.offset; - } + ByteBuffer getValueByteBuffer(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java new file mode 100644 index 0000000..86cdf75 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -0,0 +1,188 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.util.StreamUtils; +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; + +@InterfaceAudience.Private +public class TagUtil { + + // TODO have a creator method + /** + * Private constructor to keep this class from being instantiated. + */ + private TagUtil(){} + + /** + * Returns tag value in a new byte array. + * Primarily for use client-side. If server-side, use + * {@link Tag#getArray()} with appropriate {@link Tag#getTagOffset()} + * and {@link Tag#getTagLength()} instead to save on allocations. + * + * @return tag value in a new byte array. + */ + public static byte[] cloneValue(Tag tag) { + int tagLength = tag.getValueLength(); + byte[] tagArr = new byte[tagLength]; + if (tag.hasArray()) { + Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength); + } else { + ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(), + 0, tagLength); + } + return tagArr; + } + + /** + * Creates list of tags from given byte array, expected that it is in the expected tag format. + * + * @param b + * @param offset + * @param length + * @return List of tags + */ + public static List asList(byte[] b, int offset, int length) { + List tags = new ArrayList(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format. 
+ * + * @param b + * @param offset + * @param length + * @return List of tags + */ + public static List asList(ByteBuffer b, int offset, int length) { + List tags = new ArrayList(); + int pos = offset; + while (pos < offset + length) { + int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE); + tags.add(new OffheapTag(b, pos, tagLen + TAG_LENGTH_SIZE)); + pos += TAG_LENGTH_SIZE + tagLen; + } + return tags; + } + + /** + * Write a list of tags into a byte array + * + * @param tags + * @return the serialized tag data as bytes + */ + public static byte[] fromList(List tags) { + if (tags.size() == 0) return HConstants.EMPTY_BYTE_ARRAY; + int length = 0; + for (Tag tag : tags) { + length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; + } + byte[] b = new byte[length]; + int pos = 0; + int tlen; + for (Tag tag : tags) { + tlen = tag.getValueLength(); + pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(b, pos, tag.getType()); + if (tag.hasArray()) { + pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); + } else { + ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), + pos, tlen); + pos += tlen; + } + } + return b; + } + + /** + * Converts the value bytes of the given tag into a long value + * @param t + * @return value as long + */ + public static long getValueAsLong(Tag t) { + if (t.hasArray()) { + return Bytes.toLong(t.getValueArray(), t.getValueOffset(), t.getValueLength()); + } + return ByteBufferUtils.toLong(t.getValueByteBuffer(), t.getValueOffset()); + } + + public static byte getValueAsByte(Tag t) { + if (t.hasArray()) { + return t.getValueArray()[t.getValueOffset()]; + } + return ByteBufferUtils.toByte(t.getValueByteBuffer(), t.getValueOffset()); + } + + public static String getValueAsString(Tag tag){ + if(tag.hasArray()){ + return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } + return 
Bytes.toString(cloneValue(tag)); + } + + public static boolean matchingValue(Tag t1, Tag t2) { + if (t1.hasArray() && t2.hasArray()) { + return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + if (t1.hasArray()) { + return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(), + t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength()); + } + if (t2.hasArray()) { + return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), + t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(), + t2.getValueArray(), t2.getValueOffset(), t2.getValueLength()); + } + + public static void copyValueTo(Tag tag, byte[] out, int offset) { + if (tag.hasArray()) { + Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength()); + } else { + ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(), + offset, tag.getValueLength()); + } + } + + public static Pair readVIntValuePart(Tag tag, int offset) throws IOException { + if (tag.hasArray()) { + return StreamUtils.readRawVarint32(tag.getValueArray(), offset); + } + return StreamUtils.readRawVarint32(tag.getValueByteBuffer(), offset); + } +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java index 6e13b44..2216342 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.util; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; import 
org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; @@ -169,6 +170,46 @@ public class StreamUtils { return new Pair(result, newOffset - offset); } + public static Pair readRawVarint32(ByteBuffer input, int offset) throws IOException { + int newOffset = offset; + byte tmp = input.get(newOffset++); + if (tmp >= 0) { + return new Pair((int) tmp, newOffset - offset); + } + int result = tmp & 0x7f; + tmp = input.get(newOffset++); + if (tmp >= 0) { + result |= tmp << 7; + } else { + result |= (tmp & 0x7f) << 7; + tmp = input.get(newOffset++); + if (tmp >= 0) { + result |= tmp << 14; + } else { + result |= (tmp & 0x7f) << 14; + tmp = input.get(newOffset++); + if (tmp >= 0) { + result |= tmp << 21; + } else { + result |= (tmp & 0x7f) << 21; + tmp = input.get(newOffset++); + result |= tmp << 28; + if (tmp < 0) { + // Discard upper 32 bits. + for (int i = 0; i < 5; i++) { + tmp = input.get(newOffset++); + if (tmp >= 0) { + return new Pair(result, newOffset - offset); + } + } + throw new IOException("Malformed varint"); + } + } + } + } + return new Pair(result, newOffset - offset); + } + public static short toShort(byte hi, byte lo) { short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF)); Preconditions.checkArgument(s >= 0); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index 7bcc872..3e4b2c0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -751,6 +751,29 @@ public final class ByteBufferUtils { } /** + * Converts a ByteBuffer to an int value + * + * @param buf + * @param offset + * @param length + * @return the int value + * @throws IllegalArgumentException + * if there's not enough bytes left in the buffer after the given offset + */ + public static int readAsInt(ByteBuffer buf, 
int offset, final int length) { + if (offset + length > buf.limit()) { + throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + + ") exceed the" + " limit of the buffer: " + buf.limit()); + } + int n = 0; + for(int i = offset; i < (offset + length); i++) { + n <<= 8; + n ^= toByte(buf, i) & 0xFF; + } + return n; + } + + /** * Reads a long value at the given buffer's offset. * @param buffer * @param offset diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java index b44a724..e38466b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.OffheapKeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.io.WritableUtils; @@ -280,8 +281,8 @@ public class RedundantKVGenerator { } if (useTags) { - result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag( - (byte) 1, "value1") })); + result.add(new KeyValue(row, family, qualifier, timestamp, value, + new Tag[] { new ArrayBackedTag((byte) 1, "value1") })); } else { result.add(new KeyValue(row, family, qualifier, timestamp, value)); } @@ -365,7 +366,7 @@ public class RedundantKVGenerator { } if (useTags) { KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value, - new Tag[] { new Tag((byte) 1, "value1") }); + new Tag[] { new ArrayBackedTag((byte) 1, "value1") }); ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength()); 
ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(), keyValue.getOffset(), keyValue.getLength()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index cc1e511..e233348 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -442,7 +442,7 @@ public class TestKeyValue extends TestCase { byte[] metaValue1 = Bytes.toBytes("metaValue1"); byte[] metaValue2 = Bytes.toBytes("metaValue2"); KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] { - new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) }); + new ArrayBackedTag((byte) 1, metaValue1), new ArrayBackedTag((byte) 2, metaValue2) }); assertTrue(kv.getTagsLength() > 0); assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0, row.length)); @@ -458,44 +458,42 @@ public class TestKeyValue extends TestCase { boolean meta1Ok = false, meta2Ok = false; for (Tag tag : tags) { if (tag.getType() == (byte) 1) { - if (Bytes.equals(tag.getValue(), metaValue1)) { + if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) { meta1Ok = true; } } else { - if (Bytes.equals(tag.getValue(), metaValue2)) { + if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) { meta2Ok = true; } } } assertTrue(meta1Ok); assertTrue(meta2Ok); - Iterator tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(), - kv.getTagsLength()); + Iterator tagItr = CellUtil.tagsIterator(kv); //Iterator tagItr = kv.tagsIterator(); assertTrue(tagItr.hasNext()); Tag next = tagItr.next(); - assertEquals(10, next.getTagLength()); + assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(next.getValue(), metaValue1); + Bytes.equals(TagUtil.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); - 
assertEquals(10, next.getTagLength()); + assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(next.getValue(), metaValue2); + Bytes.equals(TagUtil.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); - tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(), - kv.getTagsLength()); + tagItr = CellUtil.tagsIterator(kv); assertTrue(tagItr.hasNext()); next = tagItr.next(); - assertEquals(10, next.getTagLength()); + assertEquals(10, next.getValueLength()); assertEquals((byte) 1, next.getType()); - Bytes.equals(next.getValue(), metaValue1); + Bytes.equals(TagUtil.cloneValue(next), metaValue1); assertTrue(tagItr.hasNext()); next = tagItr.next(); - assertEquals(10, next.getTagLength()); + assertEquals(10, next.getValueLength()); assertEquals((byte) 2, next.getType()); - Bytes.equals(next.getValue(), metaValue2); + Bytes.equals(TagUtil.cloneValue(next), metaValue2); assertFalse(tagItr.hasNext()); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java index f021215..9e76fc5 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java @@ -43,8 +43,8 @@ public class TestOffheapKeyValue { private static final byte[] fam2 = Bytes.toBytes(FAM2); private static final byte[] qual1 = Bytes.toBytes(QUAL1); private static final byte[] qual2 = Bytes.toBytes(QUAL2); - private static final Tag t1 = new Tag((byte) 1, Bytes.toBytes("TAG1")); - private static final Tag t2 = new Tag((byte) 2, Bytes.toBytes("TAG2")); + private static final Tag t1 = new ArrayBackedTag((byte) 1, Bytes.toBytes("TAG1")); + private static final Tag t2 = new ArrayBackedTag((byte) 2, Bytes.toBytes("TAG2")); private static final ArrayList tags = new ArrayList(); static { tags.add(t1); @@ -158,17 +158,17 @@ public class 
TestOffheapKeyValue { assertEquals(0L, offheapKV.getTimestamp()); assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); // change tags to handle both onheap and offheap stuff - List resTags = - Tag.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), offheapKV.getTagsLength()); + List resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), + offheapKV.getTagsLength()); Tag tag1 = resTags.get(0); assertEquals(t1.getType(), tag1.getType()); - assertEquals(Bytes.toString(t1.getValue()), Bytes.toString(getTagValue(tag1))); + assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1)); Tag tag2 = resTags.get(1); assertEquals(tag2.getType(), tag2.getType()); - assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2))); - Tag res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 2); - assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2))); - res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 3); + assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); + Tag res = CellUtil.getTag(offheapKV, (byte) 2); + assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2)); + res = CellUtil.getTag(offheapKV, (byte) 3); assertNull(res); } @@ -195,11 +195,4 @@ public class TestOffheapKeyValue { assertEquals(0L, offheapKeyOnlyKV.getTimestamp()); assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte()); } - // TODO : Can be moved to TagUtil - private static byte[] getTagValue(Tag tag) { - int tagLength = tag.getTagLength(); - byte[] tagBytes = new byte[tagLength]; - System.arraycopy(tag.getBuffer(), tag.getTagOffset(), tagBytes, 0, tagLength); - return tagBytes; - } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index beff87a..cc70742 100644 --- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -54,16 +56,16 @@ public class TestCellCodecWithTags { Codec.Encoder encoder = codec.getEncoder(dos); final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { - new Tag((byte) 1, Bytes.toBytes("teststring1")), - new Tag((byte) 2, Bytes.toBytes("teststring2")) }); + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1, + HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")), }); final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { - new Tag((byte) 2, Bytes.toBytes("teststring4")), - new Tag((byte) 2, Bytes.toBytes("teststring5")), - new Tag((byte) 1, Bytes.toBytes("teststring6")) }); + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); encoder.write(cell1); encoder.write(cell2); @@ -77,36 +79,36 @@ public class TestCellCodecWithTags { 
assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, cell1)); - List tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell2)); - tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, cell3)); - tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - 
assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index 04fb9a9..238d0a6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -54,16 +56,16 @@ public class TestKeyValueCodecWithTags { Codec.Encoder encoder = codec.getEncoder(dos); final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { - new Tag((byte) 1, Bytes.toBytes("teststring1")), - new Tag((byte) 2, Bytes.toBytes("teststring2")) }); + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1, + HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")), }); final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), 
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { - new Tag((byte) 2, Bytes.toBytes("teststring4")), - new Tag((byte) 2, Bytes.toBytes("teststring5")), - new Tag((byte) 1, Bytes.toBytes("teststring6")) }); + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); encoder.write(kv1); encoder.write(kv2); @@ -77,36 +79,36 @@ public class TestKeyValueCodecWithTags { assertTrue(decoder.advance()); Cell c = decoder.current(); assertTrue(CellUtil.equals(c, kv1)); - List tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + List tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv2)); - tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag))); assertTrue(decoder.advance()); c = decoder.current(); assertTrue(CellUtil.equals(c, kv3)); - tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = 
tags.get(0); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag))); tag = tags.get(1); assertEquals(2, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag))); tag = tags.get(2); assertEquals(1, tag.getType()); - assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue())); + assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag))); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java index f4c4afe..6c46cf2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java @@ -28,6 +28,7 @@ import java.util.List; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.util.LRUDictionary; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -97,7 +98,7 @@ public class TestTagCompressionContext { private KeyValue createKVWithTags(int noOfTags) { List tags = new ArrayList(); for (int i = 0; i < noOfTags; i++) { - tags.add(new Tag((byte) i, "tagValue" + i)); + tags.add(new ArrayBackedTag((byte) i, "tagValue" + i)); } KeyValue kv = new KeyValue(ROW, CF, Q, 1234L, V, tags); return kv; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java index 
bd2a29d..717e24c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java @@ -22,6 +22,7 @@ import java.util.List; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Assert; @@ -65,7 +66,7 @@ public class TestByteRangeWithKVSerialization { int kvCount = 1000000; List kvs = new ArrayList(kvCount); int totalSize = 0; - Tag[] tags = new Tag[] { new Tag((byte) 1, "tag1") }; + Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") }; for (int i = 0; i < kvCount; i++) { KeyValue kv = new KeyValue(Bytes.toBytes(i), FAMILY, QUALIFIER, i, VALUE, tags); kv.setSequenceId(i); diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java index 3c3699b..a615155 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java @@ -23,6 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; @@ -46,9 +47,9 @@ public class TestRowDataTrivialWithTags extends BaseTestRowData{ static List d = 
Lists.newArrayList(); static { List tagList = new ArrayList(); - Tag t = new Tag((byte) 1, "visisbility"); + Tag t = new ArrayBackedTag((byte) 1, "visisbility"); tagList.add(t); - t = new Tag((byte) 2, "ACL"); + t = new ArrayBackedTag((byte) 2, "ACL"); tagList.add(t); d.add(new KeyValue(rA, cf, cq0, ts, v0, tagList)); d.add(new KeyValue(rB, cf, cq0, ts, v0, tagList)); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java index 8424bf9..dcd5b0a 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.BufferedMutator; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -1124,7 +1125,7 @@ public class PerformanceEvaluation extends Configured implements Tool { byte[] tag = generateData(this.rand, TAG_LENGTH); Tag[] tags = new Tag[noOfTags]; for (int n = 0; n < noOfTags; n++) { - Tag t = new Tag((byte) n, tag); + Tag t = new ArrayBackedTag((byte) n, tag); tags[n] = t; } KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP, @@ -1195,7 +1196,7 @@ public class PerformanceEvaluation extends Configured implements Tool { byte[] tag = generateData(this.rand, TAG_LENGTH); Tag[] tags = new Tag[noOfTags]; for (int n = 0; n < noOfTags; n++) { - Tag t = new Tag((byte) n, tag); + Tag t = new ArrayBackedTag((byte) n, tag); tags[n] = t; } KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP, diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 86d183b..cc202d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -59,10 +59,11 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.mob.MobUtils; @@ -367,11 +368,10 @@ public class HFilePrettyPrinter extends Configured implements Tool { + Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); int i = 0; - List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), + List tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); for (Tag tag : tags) { - System.out.print(String.format(" T[%d]: %s", i++, - Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()))); + System.out.print(String.format(" T[%d]: %s", i++, TagUtil.getValueAsString(tag))); } } System.out.println(); @@ -411,7 +411,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { System.err.println("ERROR, wrong value format in mob reference cell " + CellUtil.getCellKeyAsString(cell)); } else { - TableName tn = TableName.valueOf(tnTag.getValue()); + TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag)); String mobFileName = 
MobUtils.getMobFileName(cell); boolean exist = mobFileExists(fs, tn, mobFileName, Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java index c201eb7..fea1eb3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -169,7 +170,7 @@ public class TextSortReducer extends // Add TTL directly to the KV so we can vary them when packing more than one KV // into puts if (ttl > 0) { - tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl))); + tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl))); } for (int i = 0; i < parsed.getColumnCount(); i++) { if (i == parser.getRowKeyColumnIndex() || i == parser.getTimestampKeyColumnIndex() diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java index 98dc25e..b0d3126 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java @@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import 
org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -170,7 +171,7 @@ extends Mapper // Add TTL directly to the KV so we can vary them when packing more than one KV // into puts if (ttl > 0) { - tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl))); + tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl))); } } Put put = new Put(rowKey.copyBytes()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java index f48bb94..f05a479 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; @@ -167,7 +168,8 @@ public class DefaultMobStoreCompactor extends DefaultCompactor { byte[] fileName = null; StoreFile.Writer mobFileWriter = null, delFileWriter = null; long mobCells = 0, deleteMarkersCount = 0; - Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName()); + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, + store.getTableName().getName()); long cellsCountCompactedToMob = 0, cellsCountCompactedFromMob = 0; long cellsSizeCompactedToMob = 0, cellsSizeCompactedFromMob = 0; try { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java index ff350bf..a7148f2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.monitoring.MonitoredTask; @@ -166,8 +167,8 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher { // the relative path is mobFiles byte[] fileName = Bytes.toBytes(mobFileWriter.getPath().getName()); try { - Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName() - .getName()); + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, + store.getTableName().getName()); List cells = new ArrayList(); boolean hasMore; ScannerContext scannerContext = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java index 4bdfe97..811088e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.mob; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -66,7 +67,7 @@ public final class MobConstants { public static final String MOB_CACHE_EVICT_PERIOD = "hbase.mob.cache.evict.period"; 
public static final String MOB_CACHE_EVICT_REMAIN_RATIO = "hbase.mob.cache.evict.remain.ratio"; - public static final Tag MOB_REF_TAG = new Tag(TagType.MOB_REFERENCE_TAG_TYPE, + public static final Tag MOB_REF_TAG = new ArrayBackedTag(TagType.MOB_REFERENCE_TAG_TYPE, HConstants.EMPTY_BYTE_ARRAY); public static final float DEFAULT_EVICT_REMAIN_RATIO = 0.5f; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index d654788..52a19f5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -122,8 +123,7 @@ public final class MobUtils { */ public static boolean isMobReferenceCell(Cell cell) { if (cell.getTagsLength() > 0) { - Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), - TagType.MOB_REFERENCE_TAG_TYPE); + Tag tag = CellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE); return tag != null; } return false; @@ -136,9 +136,7 @@ public final class MobUtils { */ public static Tag getTableNameTag(Cell cell) { if (cell.getTagsLength() > 0) { - Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), - TagType.MOB_TABLE_NAME_TAG_TYPE); - return tag; + return CellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE); } return null; } @@ -438,7 +436,7 @@ public final class MobUtils { // snapshot for mob files. tags.add(tableNameTag); // Add the existing tags. 
- tags.addAll(Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength())); + tags.addAll(CellUtil.getTags(cell)); int valueLength = cell.getValueLength(); byte[] refValue = Bytes.add(Bytes.toBytes(valueLength), fileName); KeyValue reference = new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index ab9ee7e..c0c1645 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Connection; @@ -113,7 +114,7 @@ public class PartitionedMobCompactor extends MobCompactor { Configuration copyOfConf = new Configuration(conf); copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f); compactionCacheConfig = new CacheConfig(copyOfConf); - tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName()); + tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName()); cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java index 3daef7e..fc7691d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.BufferedMutator; @@ -156,8 +157,8 @@ public class MemStoreWrapper { scanner = snapshot.getScanner(); scanner.seek(KeyValueUtil.createFirstOnRow(HConstants.EMPTY_START_ROW)); cell = null; - Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, Bytes.toBytes(this.table.getName() - .toString())); + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, + Bytes.toBytes(this.table.getName().toString())); long updatedCount = 0; while (null != (cell = scanner.next())) { KeyValue reference = MobUtils.createMobRefKeyValue(cell, referenceValue, tableNameTag); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index faf6d81..496c7e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; @@ -338,8 +339,7 @@ public class HMobStore extends HStore { String fileName = MobUtils.getMobFileName(reference); Tag tableNameTag = MobUtils.getTableNameTag(reference); if (tableNameTag != null) { - byte[] 
tableName = tableNameTag.getValue(); - String tableNameString = Bytes.toString(tableName); + String tableNameString = TagUtil.getValueAsString(tableNameTag); List locations = map.get(tableNameString); if (locations == null) { IdLock.Entry lockEntry = keyLock.getLockEntry(tableNameString.hashCode()); @@ -347,7 +347,7 @@ public class HMobStore extends HStore { locations = map.get(tableNameString); if (locations == null) { locations = new ArrayList(2); - TableName tn = TableName.valueOf(tableName); + TableName tn = TableName.valueOf(tableNameString); locations.add(MobUtils.getMobFamilyPath(conf, tn, family.getNameAsString())); locations.add(HFileArchiveUtil.getStoreArchivePath(conf, tn, MobUtils .getMobRegionInfo(tn).getEncodedName(), family.getNameAsString())); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 9549a13..bd3cc72 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -92,8 +92,10 @@ import org.apache.hadoop.hbase.RegionTooBusyException; import org.apache.hadoop.hbase.ShareableMemory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagRewriteCell; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.backup.HFileArchiver; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -3667,8 +3669,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi for (int i = 0; i < listSize; i++) { Cell cell = cells.get(i); List newTags = new ArrayList(); - Iterator tagIterator = CellUtil.tagsIterator(cell.getTagsArray(), - cell.getTagsOffset(), cell.getTagsLength()); + 
Iterator tagIterator = CellUtil.tagsIterator(cell); // Carry forward existing tags @@ -3685,11 +3686,11 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // above may change when there are more tag based features in core. if (m.getTTL() != Long.MAX_VALUE) { // Add a cell TTL tag - newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL()))); + newTags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL()))); } // Rewrite the cell with the updated set of tags - cells.set(i, new TagRewriteCell(cell, Tag.fromList(newTags))); + cells.set(i, new TagRewriteCell(cell, TagUtil.fromList(newTags))); } } } @@ -7047,8 +7048,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi private static List carryForwardTags(final Cell cell, final List tags) { if (cell.getTagsLength() <= 0) return tags; List newTags = tags == null? new ArrayList(): /*Append Tags*/tags; - Iterator i = - CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + Iterator i = CellUtil.tagsIterator(cell); while (i.hasNext()) newTags.add(i.next()); return newTags; } @@ -7152,11 +7152,11 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (mutate.getTTL() != Long.MAX_VALUE) { // Add the new TTL tag - newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL()))); + newTags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL()))); } // Rebuild tags - byte[] tagBytes = Tag.fromList(newTags); + byte[] tagBytes = TagUtil.fromList(newTags); // allocate an empty cell once newCell = new KeyValue(row.length, cell.getFamilyLength(), @@ -7190,9 +7190,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (mutate.getTTL() != Long.MAX_VALUE) { List newTags = new ArrayList(1); - newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL()))); + newTags.add(new 
ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL()))); // Add the new TTL tag - newCell = new TagRewriteCell(cell, Tag.fromList(newTags)); + newCell = new TagRewriteCell(cell, TagUtil.fromList(newTags)); } else { newCell = cell; } @@ -7413,7 +7413,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // Add the TTL tag if the mutation carried one if (mutation.getTTL() != Long.MAX_VALUE) { - newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutation.getTTL()))); + newTags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutation.getTTL()))); } Cell newKV = new KeyValue(row, 0, row.length, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index badbd65..8d66696 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.conf.ConfigurationManager; @@ -1779,28 +1780,24 @@ public class HStore implements Store { * @return true if the cell is expired */ static boolean isCellTTLExpired(final Cell cell, final long oldestTimestamp, final long now) { - // Do not create an Iterator or Tag objects unless the cell actually has tags. - if (cell.getTagsLength() > 0) { - // Look for a TTL tag first. Use it instead of the family setting if - // found. If a cell has multiple TTLs, resolve the conflict by using the - // first tag encountered. 
- Iterator i = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (i.hasNext()) { - Tag t = i.next(); - if (TagType.TTL_TAG_TYPE == t.getType()) { - // Unlike in schema cell TTLs are stored in milliseconds, no need - // to convert - long ts = cell.getTimestamp(); - assert t.getTagLength() == Bytes.SIZEOF_LONG; - long ttl = Bytes.toLong(t.getBuffer(), t.getTagOffset(), t.getTagLength()); - if (ts + ttl < now) { - return true; - } - // Per cell TTLs cannot extend lifetime beyond family settings, so - // fall through to check that - break; + // Look for a TTL tag first. Use it instead of the family setting if + // found. If a cell has multiple TTLs, resolve the conflict by using the + // first tag encountered. + Iterator i = CellUtil.tagsIterator(cell); + while (i.hasNext()) { + Tag t = i.next(); + if (TagType.TTL_TAG_TYPE == t.getType()) { + // Unlike in schema cell TTLs are stored in milliseconds, no need + // to convert + long ts = cell.getTimestamp(); + assert t.getValueLength() == Bytes.SIZEOF_LONG; + long ttl = TagUtil.getValueAsLong(t); + if (ts + ttl < now) { + return true; } + // Per cell TTLs cannot extend lifetime beyond family settings, so + // fall through to check that + break; } } return false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 887af0a..f0723c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import 
org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -658,8 +659,7 @@ public class AccessControlLists { return null; } List results = Lists.newArrayList(); - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + Iterator tagsIterator = CellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == ACL_TAG_TYPE) { @@ -668,7 +668,12 @@ public class AccessControlLists { // use the builder AccessControlProtos.UsersAndPermissions.Builder builder = AccessControlProtos.UsersAndPermissions.newBuilder(); - ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()); + if (tag.hasArray()) { + ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(), + tag.getValueLength()); + } else { + ProtobufUtil.mergeFrom(builder, TagUtil.cloneValue(tag)); + } ListMultimap kvPerms = ProtobufUtil.toUsersAndPermissions(builder.build()); // Are there permissions for this user? 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 0d8b261..b4aae15 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security.access; import java.io.IOException; import java.net.InetAddress; import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; @@ -53,7 +54,9 @@ import org.apache.hadoop.hbase.ProcedureInfo; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagRewriteCell; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; @@ -882,15 +885,13 @@ public class AccessController extends BaseMasterAndRegionObserver List newCells = Lists.newArrayList(); for (Cell cell: e.getValue()) { // Prepend the supplied perms in a new ACL tag to an update list of tags for the cell - List tags = Lists.newArrayList(new Tag(AccessControlLists.ACL_TAG_TYPE, perms)); - if (cell.getTagsLength() > 0) { - Iterator tagIterator = CellUtil.tagsIterator(cell.getTagsArray(), - cell.getTagsOffset(), cell.getTagsLength()); - while (tagIterator.hasNext()) { - tags.add(tagIterator.next()); - } + List tags = new ArrayList(); + tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms)); + Iterator tagIterator = CellUtil.tagsIterator(cell); + while (tagIterator.hasNext()) { + tags.add(tagIterator.next()); } - newCells.add(new TagRewriteCell(cell, 
Tag.fromList(tags))); + newCells.add(new TagRewriteCell(cell, TagUtil.fromList(tags))); } // This is supposed to be safe, won't CME e.setValue(newCells); @@ -915,14 +916,10 @@ public class AccessController extends BaseMasterAndRegionObserver return; } for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { - Cell cell = cellScanner.current(); - if (cell.getTagsLength() > 0) { - Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsItr.hasNext()) { - if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) { - throw new AccessDeniedException("Mutation contains cell with reserved type tag"); - } + Iterator tagsItr = CellUtil.tagsIterator(cellScanner.current()); + while (tagsItr.hasNext()) { + if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) { + throw new AccessDeniedException("Mutation contains cell with reserved type tag"); } } } @@ -1997,32 +1994,21 @@ public class AccessController extends BaseMasterAndRegionObserver // Collect any ACLs from the old cell List tags = Lists.newArrayList(); + List aclTags = Lists.newArrayList(); ListMultimap perms = ArrayListMultimap.create(); if (oldCell != null) { - // Save an object allocation where we can - if (oldCell.getTagsLength() > 0) { - Iterator tagIterator = CellUtil.tagsIterator(oldCell.getTagsArray(), - oldCell.getTagsOffset(), oldCell.getTagsLength()); - while (tagIterator.hasNext()) { - Tag tag = tagIterator.next(); - if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) { - // Not an ACL tag, just carry it through - if (LOG.isTraceEnabled()) { - LOG.trace("Carrying forward tag from " + oldCell + ": type " + tag.getType() + - " length " + tag.getTagLength()); - } - tags.add(tag); - } else { - // Merge the perms from the older ACL into the current permission set - // TODO: The efficiency of this can be improved. 
Don't build just to unpack - // again, use the builder - AccessControlProtos.UsersAndPermissions.Builder builder = - AccessControlProtos.UsersAndPermissions.newBuilder(); - ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()); - ListMultimap kvPerms = - ProtobufUtil.toUsersAndPermissions(builder.build()); - perms.putAll(kvPerms); + Iterator tagIterator = CellUtil.tagsIterator(oldCell); + while (tagIterator.hasNext()) { + Tag tag = tagIterator.next(); + if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) { + // Not an ACL tag, just carry it through + if (LOG.isTraceEnabled()) { + LOG.trace("Carrying forward tag from " + oldCell + ": type " + tag.getType() + + " length " + tag.getValueLength()); } + tags.add(tag); + } else { + aclTags.add(tag); } } } @@ -2031,7 +2017,7 @@ public class AccessController extends BaseMasterAndRegionObserver byte[] aclBytes = mutation.getACL(); if (aclBytes != null) { // Yes, use it - tags.add(new Tag(AccessControlLists.ACL_TAG_TYPE, aclBytes)); + tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, aclBytes)); } else { // No, use what we carried forward if (perms != null) { @@ -2041,8 +2027,7 @@ public class AccessController extends BaseMasterAndRegionObserver if (LOG.isTraceEnabled()) { LOG.trace("Carrying forward ACLs from " + oldCell + ": " + perms); } - tags.add(new Tag(AccessControlLists.ACL_TAG_TYPE, - ProtobufUtil.toUsersAndPermissions(perms).toByteArray())); + tags.addAll(aclTags); } } @@ -2051,7 +2036,7 @@ public class AccessController extends BaseMasterAndRegionObserver return newCell; } - Cell rewriteCell = new TagRewriteCell(newCell, Tag.fromList(tags)); + Cell rewriteCell = new TagRewriteCell(newCell, TagUtil.fromList(tags)); return rewriteCell; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java 
index 42d6a03..b89079d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -48,7 +48,9 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; @@ -90,7 +92,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService } catch (IOException e) { // We write to a byte array. No Exception can happen. } - LABELS_TABLE_TAGS[0] = new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray()); + LABELS_TABLE_TAGS[0] = new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()); } public DefaultVisibilityLabelServiceImpl() { @@ -481,42 +483,37 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService @Override public boolean evaluate(Cell cell) throws IOException { boolean visibilityTagPresent = false; - // Save an object allocation where we can - if (cell.getTagsLength() > 0) { - Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsItr.hasNext()) { - boolean includeKV = true; - Tag tag = tagsItr.next(); - if (tag.getType() == VISIBILITY_TAG_TYPE) { - visibilityTagPresent = true; - int offset = tag.getTagOffset(); - int endOffset = offset + tag.getTagLength(); - while (offset < endOffset) { - Pair result = StreamUtils - .readRawVarint32(tag.getBuffer(), offset); - int currLabelOrdinal = result.getFirst(); - if (currLabelOrdinal < 0) { - // check for the absence of this label 
in the Scan Auth labels - // ie. to check BitSet corresponding bit is 0 - int temp = -currLabelOrdinal; - if (bs.get(temp)) { - includeKV = false; - break; - } - } else { - if (!bs.get(currLabelOrdinal)) { - includeKV = false; - break; - } + Iterator tagsItr = CellUtil.tagsIterator(cell); + while (tagsItr.hasNext()) { + boolean includeKV = true; + Tag tag = tagsItr.next(); + if (tag.getType() == VISIBILITY_TAG_TYPE) { + visibilityTagPresent = true; + int offset = tag.getValueOffset(); + int endOffset = offset + tag.getValueLength(); + while (offset < endOffset) { + Pair result = TagUtil.readVIntValuePart(tag, offset); + int currLabelOrdinal = result.getFirst(); + if (currLabelOrdinal < 0) { + // check for the absence of this label in the Scan Auth labels + // ie. to check BitSet corresponding bit is 0 + int temp = -currLabelOrdinal; + if (bs.get(temp)) { + includeKV = false; + break; + } + } else { + if (!bs.get(currLabelOrdinal)) { + includeKV = false; + break; } - offset += result.getSecond(); - } - if (includeKV) { - // We got one visibility expression getting evaluated to true. Good to include this - // KV in the result then. - return true; } + offset += result.getSecond(); + } + if (includeKV) { + // We got one visibility expression getting evaluated to true. Good to include this + // KV in the result then. 
+ return true; } } } @@ -596,8 +593,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService for (Tag tag : deleteVisTags) { matchFound = false; for (Tag givenTag : putVisTags) { - if (Bytes.equals(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength(), - givenTag.getBuffer(), givenTag.getTagOffset(), givenTag.getTagLength())) { + if (TagUtil.matchingValue(tag, givenTag)) { matchFound = true; break; } @@ -621,10 +617,10 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService private static void getSortedTagOrdinals(List> fullTagsList, Tag tag) throws IOException { List tagsOrdinalInSortedOrder = new ArrayList(); - int offset = tag.getTagOffset(); - int endOffset = offset + tag.getTagLength(); + int offset = tag.getValueOffset(); + int endOffset = offset + tag.getValueLength(); while (offset < endOffset) { - Pair result = StreamUtils.readRawVarint32(tag.getBuffer(), offset); + Pair result = TagUtil.readVIntValuePart(tag, offset); tagsOrdinalInSortedOrder.add(result.getFirst()); offset += result.getSecond(); } @@ -678,11 +674,11 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService visibilityString.append(VisibilityConstants.CLOSED_PARAN).append( VisibilityConstants.OR_OPERATOR); } - int offset = tag.getTagOffset(); - int endOffset = offset + tag.getTagLength(); + int offset = tag.getValueOffset(); + int endOffset = offset + tag.getValueLength(); boolean expressionStart = true; while (offset < endOffset) { - Pair result = StreamUtils.readRawVarint32(tag.getBuffer(), offset); + Pair result = TagUtil.readVIntValuePart(tag, offset); int currLabelOrdinal = result.getFirst(); if (currLabelOrdinal < 0) { int temp = -currLabelOrdinal; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 5b8bdb3..b025758 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagRewriteCell; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; @@ -340,8 +341,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements Tag tag = pair.getSecond(); if (cellVisibility == null && tag != null) { // May need to store only the first one - cellVisibility = new CellVisibility(Bytes.toString(tag.getBuffer(), tag.getTagOffset(), - tag.getTagLength())); + cellVisibility = new CellVisibility(TagUtil.getValueAsString(tag)); modifiedTagFound = true; } } @@ -368,14 +368,13 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements List updatedCells = new ArrayList(); for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { Cell cell = cellScanner.current(); - List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + List tags = CellUtil.getTags(cell); if (modifiedTagFound) { // Rewrite the tags by removing the modified tags. 
removeReplicationVisibilityTag(tags); } tags.addAll(visibilityTags); - Cell updatedCell = new TagRewriteCell(cell, Tag.fromList(tags)); + Cell updatedCell = new TagRewriteCell(cell, TagUtil.fromList(tags)); updatedCells.add(updatedCell); } m.getFamilyCellMap().clear(); @@ -472,28 +471,22 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements // cell visiblilty tags // have been modified Tag modifiedTag = null; - if (cell.getTagsLength() > 0) { - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), - cell.getTagsOffset(), cell.getTagsLength()); - while (tagsIterator.hasNext()) { - Tag tag = tagsIterator.next(); - if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { - modifiedTag = tag; - break; - } + Iterator tagsIterator = CellUtil.tagsIterator(cell); + while (tagsIterator.hasNext()) { + Tag tag = tagsIterator.next(); + if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { + modifiedTag = tag; + break; } } pair.setFirst(true); pair.setSecond(modifiedTag); return pair; } - if (cell.getTagsLength() > 0) { - Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsItr.hasNext()) { - if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { - return pair; - } + Iterator tagsItr = CellUtil.tagsIterator(cell); + while (tagsItr.hasNext()) { + if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { + return pair; } } pair.setFirst(true); @@ -520,13 +513,10 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements if (isSystemOrSuperUser()) { return true; } - if (cell.getTagsLength() > 0) { - Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsItr.hasNext()) { - if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { - return false; - } + Iterator tagsItr = CellUtil.tagsIterator(cell); + while (tagsItr.hasNext()) { + if 
(RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) { + return false; } } return true; @@ -739,21 +729,17 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements boolean authCheck = authorizationEnabled && checkAuths && !(isSystemOrSuperUser()); tags.addAll(this.visibilityLabelService.createVisibilityExpTags(cellVisibility.getExpression(), true, authCheck)); - // Save an object allocation where we can - if (newCell.getTagsLength() > 0) { - // Carry forward all other tags - Iterator tagsItr = CellUtil.tagsIterator(newCell.getTagsArray(), - newCell.getTagsOffset(), newCell.getTagsLength()); - while (tagsItr.hasNext()) { - Tag tag = tagsItr.next(); - if (tag.getType() != TagType.VISIBILITY_TAG_TYPE - && tag.getType() != TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - tags.add(tag); - } + // Carry forward all other tags + Iterator tagsItr = CellUtil.tagsIterator(newCell); + while (tagsItr.hasNext()) { + Tag tag = tagsItr.next(); + if (tag.getType() != TagType.VISIBILITY_TAG_TYPE + && tag.getType() != TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { + tags.add(tag); } } - Cell rewriteCell = new TagRewriteCell(newCell, Tag.fromList(tags)); + Cell rewriteCell = new TagRewriteCell(newCell, TagUtil.fromList(tags)); return rewriteCell; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java index aca4994..c25d9f0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java @@ -26,8 +26,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Tag; +import 
org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagRewriteCell; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.replication.ReplicationEndpoint; @@ -79,7 +81,7 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint { byte[] modifiedVisExpression = visibilityLabelsService .encodeVisibilityForReplication(visTags, serializationFormat); if (modifiedVisExpression != null) { - nonVisTags.add(new Tag(TagType.STRING_VIS_TAG_TYPE, modifiedVisExpression)); + nonVisTags.add(new ArrayBackedTag(TagType.STRING_VIS_TAG_TYPE, modifiedVisExpression)); } } catch (Exception ioe) { LOG.error( @@ -92,7 +94,7 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint { continue; } // Recreate the cell with the new tags and the existing tags - Cell newCell = new TagRewriteCell(cell, Tag.fromList(nonVisTags)); + Cell newCell = new TagRewriteCell(cell, TagUtil.fromList(nonVisTags)); newEdit.add(newCell); } else { newEdit.add(cell); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java index c725b11..7f48555 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -39,7 +39,9 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import 
org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter; @@ -74,7 +76,7 @@ public class VisibilityUtils { public static final String VISIBILITY_LABEL_GENERATOR_CLASS = "hbase.regionserver.scan.visibility.label.generator.class"; public static final String SYSTEM_LABEL = "system"; - public static final Tag SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG = new Tag( + public static final Tag SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG = new ArrayBackedTag( TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE, VisibilityConstants.SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG_VAL); private static final String COMMA = ","; @@ -209,16 +211,13 @@ public class VisibilityUtils { */ public static Byte extractVisibilityTags(Cell cell, List tags) { Byte serializationFormat = null; - if (cell.getTagsLength() > 0) { - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsIterator.hasNext()) { - Tag tag = tagsIterator.next(); - if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = tag.getBuffer()[tag.getTagOffset()]; - } else if (tag.getType() == VISIBILITY_TAG_TYPE) { - tags.add(tag); - } + Iterator tagsIterator = CellUtil.tagsIterator(cell); + while (tagsIterator.hasNext()) { + Tag tag = tagsIterator.next(); + if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { + serializationFormat = TagUtil.getValueAsByte(tag); + } else if (tag.getType() == VISIBILITY_TAG_TYPE) { + tags.add(tag); } } return serializationFormat; @@ -239,30 +238,23 @@ public class VisibilityUtils { public static Byte extractAndPartitionTags(Cell cell, List visTags, List nonVisTags) { Byte serializationFormat = null; - if (cell.getTagsLength() > 0) { - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); - while (tagsIterator.hasNext()) { - Tag tag = tagsIterator.next(); - if 
(tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { - serializationFormat = tag.getBuffer()[tag.getTagOffset()]; - } else if (tag.getType() == VISIBILITY_TAG_TYPE) { - visTags.add(tag); - } else { - // ignore string encoded visibility expressions, will be added in replication handling - nonVisTags.add(tag); - } + Iterator tagsIterator = CellUtil.tagsIterator(cell); + while (tagsIterator.hasNext()) { + Tag tag = tagsIterator.next(); + if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) { + serializationFormat = TagUtil.getValueAsByte(tag); + } else if (tag.getType() == VISIBILITY_TAG_TYPE) { + visTags.add(tag); + } else { + // ignore string encoded visibility expressions, will be added in replication handling + nonVisTags.add(tag); } } return serializationFormat; } public static boolean isVisibilityTagsPresent(Cell cell) { - if (cell.getTagsLength() == 0) { - return false; - } - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + Iterator tagsIterator = CellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); if (tag.getType() == VISIBILITY_TAG_TYPE) { @@ -322,7 +314,7 @@ public class VisibilityUtils { if (node.isSingleNode()) { getLabelOrdinals(node, labelOrdinals, auths, checkAuths, ordinalProvider); writeLabelOrdinalsToStream(labelOrdinals, dos); - tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray())); + tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray())); baos.reset(); } else { NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; @@ -330,14 +322,14 @@ public class VisibilityUtils { for (ExpressionNode child : nlNode.getChildExps()) { getLabelOrdinals(child, labelOrdinals, auths, checkAuths, ordinalProvider); writeLabelOrdinalsToStream(labelOrdinals, dos); - tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray())); + tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray())); 
baos.reset(); labelOrdinals.clear(); } } else { getLabelOrdinals(nlNode, labelOrdinals, auths, checkAuths, ordinalProvider); writeLabelOrdinalsToStream(labelOrdinals, dos); - tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray())); + tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray())); baos.reset(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java index 5df7394..b212fe6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; @@ -339,12 +340,10 @@ public class WALPrettyPrinter { stringMap.put("vlen", cell.getValueLength()); if (cell.getTagsLength() > 0) { List tagsString = new ArrayList(); - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + Iterator tagsIterator = CellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag tag = tagsIterator.next(); - tagsString.add((tag.getType()) + ":" - + Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength())); + tagsString.add((tag.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(tag))); } stringMap.put("tag", tagsString); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index 30629a3..821b995 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -1405,7 +1405,7 @@ public class PerformanceEvaluation extends Configured implements Tool { byte[] tag = generateData(this.rand, TAG_LENGTH); Tag[] tags = new Tag[opts.noOfTags]; for (int n = 0; n < opts.noOfTags; n++) { - Tag t = new Tag((byte) n, tag); + Tag t = new ArrayBackedTag((byte) n, tag); tags[n] = t; } KeyValue kv = new KeyValue(row, FAMILY_NAME, qualifier, HConstants.LATEST_TIMESTAMP, @@ -1493,7 +1493,7 @@ public class PerformanceEvaluation extends Configured implements Tool { byte[] tag = generateData(this.rand, TAG_LENGTH); Tag[] tags = new Tag[opts.noOfTags]; for (int n = 0; n < opts.noOfTags; n++) { - Tag t = new Tag((byte) n, tag); + Tag t = new ArrayBackedTag((byte) n, tag); tags[n] = t; } KeyValue kv = new KeyValue(row, FAMILY_NAME, qualifier, HConstants.LATEST_TIMESTAMP, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java index f83590a..1647e97 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -103,11 +104,11 @@ public class TestResultSizeEstimation { Table table = TEST_UTIL.createTable(TABLE, FAMILIES); Put p = new Put(ROW1); p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE, - new Tag[] { new Tag((byte)1, new 
byte[TAG_DATA_SIZE]) } )); + new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } )); table.put(p); p = new Put(ROW2); p.add(new KeyValue(ROW2, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE, - new Tag[] { new Tag((byte)1, new byte[TAG_DATA_SIZE]) } )); + new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } )); table.put(p); Scan s = new Scan(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java index ce48ca1..00969b2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeSeeker; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.BufferGrabbingByteArrayOutputStream; @@ -136,10 +137,10 @@ public class TestDataBlockEncoders { } else { byte[] metaValue1 = Bytes.toBytes("metaValue1"); byte[] metaValue2 = Bytes.toBytes("metaValue2"); - kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1, - metaValue1) })); - kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1, - metaValue2) })); + kvList.add(new KeyValue(row, family, qualifier, 0l, value, + new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) })); + kvList.add(new KeyValue(row, family, qualifier, 0l, value, + new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) })); } testEncodersOnDataset(kvList, includesMemstoreTS, includesTags); } @@ -160,10 +161,10 @@ public class TestDataBlockEncoders { if 
(includesTags) { byte[] metaValue1 = Bytes.toBytes("metaValue1"); byte[] metaValue2 = Bytes.toBytes("metaValue2"); - kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1, - metaValue1) })); - kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1, - metaValue2) })); + kvList.add(new KeyValue(row, family, qualifier, 0l, value, + new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) })); + kvList.add(new KeyValue(row, family, qualifier, 0l, value, + new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) })); } else { kvList.add(new KeyValue(row, family, qualifier, -1l, Type.Put, value)); kvList.add(new KeyValue(row, family, qualifier, -2l, Type.Put, value)); @@ -416,10 +417,10 @@ public class TestDataBlockEncoders { byte[] value0 = new byte[] { 'd' }; byte[] value1 = new byte[] { 0x00 }; if (includesTags) { - kvList.add(new KeyValue(row, family, qualifier0, 0, value0, new Tag[] { new Tag((byte) 1, - "value1") })); - kvList.add(new KeyValue(row, family, qualifier1, 0, value1, new Tag[] { new Tag((byte) 1, - "value1") })); + kvList.add(new KeyValue(row, family, qualifier0, 0, value0, + new Tag[] { new ArrayBackedTag((byte) 1, "value1") })); + kvList.add(new KeyValue(row, family, qualifier1, 0, value1, + new Tag[] { new ArrayBackedTag((byte) 1, "value1") })); } else { kvList.add(new KeyValue(row, family, qualifier0, 0, Type.Put, value0)); kvList.add(new KeyValue(row, family, qualifier1, 0, Type.Put, value1)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java index ce66e82..0869df6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import 
org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; @@ -148,7 +149,7 @@ public class TestEncodedSeekers { byte[] value = dataGenerator.generateRandomSizeValue(key, col); if (includeTags) { Tag[] tag = new Tag[1]; - tag[0] = new Tag((byte) 1, "Visibility"); + tag[0] = new ArrayBackedTag((byte) 1, "Visibility"); KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag); put.add(kv); } else { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java index 031bf25..fd9b90b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker; @@ -280,7 +281,7 @@ public class TestPrefixTreeEncoding { kvset.add(kv); } else { KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0l, - getValue(batchId, i, j), new Tag[] { new Tag((byte) 1, "metaValue1") }); + getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") }); kvset.add(kv); } } @@ -308,7 +309,7 @@ public class TestPrefixTreeEncoding { kvset.add(kv); } else { KeyValue kv = new KeyValue(getRowKey(batchId, i), 
CF_BYTES, getQualifier(j), 0l, - getValue(batchId, i, j), new Tag[] { new Tag((byte) 1, "metaValue1") }); + getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") }); kvset.add(kv); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 2c957ef..e63129b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.fs.HFileSystem; @@ -385,7 +386,7 @@ public class TestCacheOnWrite { byte[] value = TestHFileWriterV2.randomValue(rand); KeyValue kv; if(useTags) { - Tag t = new Tag((byte) 1, "visibility"); + Tag t = new ArrayBackedTag((byte) 1, "visibility"); List tagList = new ArrayList(); tagList.add(t); Tag[] tags = new Tag[1]; @@ -434,7 +435,7 @@ public class TestCacheOnWrite { String valueStr = "value_" + rowStr + "_" + qualStr; for (int iTS = 0; iTS < 5; ++iTS) { if (useTags) { - Tag t = new Tag((byte) 1, "visibility"); + Tag t = new ArrayBackedTag((byte) 1, "visibility"); Tag[] tags = new Tag[1]; tags[0] = t; KeyValue kv = new KeyValue(Bytes.toBytes(rowStr), cfBytes, Bytes.toBytes(qualStr), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index 929ad8a..66fb49c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; @@ -169,7 +170,7 @@ public class TestHFile { for (int i = start; i < (start + n); i++) { String key = String.format(localFormatter, Integer.valueOf(i)); if (useTags) { - Tag t = new Tag((byte) 1, "myTag1"); + Tag t = new ArrayBackedTag((byte) 1, "myTag1"); Tag[] tags = new Tag[1]; tags[0] = t; kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 12fb584..4ee7f5b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; @@ -167,8 +168,8 @@ public class TestHFileBlock { if (!useTag) { keyValues.add(new KeyValue(row, family, qualifier, timestamp, value)); } else { - keyValues.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag( - (byte) 1, Bytes.toBytes("myTagVal")) })); + keyValues.add(new KeyValue(row, family, qualifier, timestamp, value, + new 
Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("myTagVal")) })); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index 979c9f6..faa6853 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; @@ -140,7 +141,7 @@ public class TestHFileWriterV3 { for (int j = 0; j < 1 + rand.nextInt(4); j++) { byte[] tagBytes = new byte[16]; rand.nextBytes(tagBytes); - tags.add(new Tag((byte) 1, tagBytes)); + tags.add(new ArrayBackedTag((byte) 1, tagBytes)); } keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes, tags); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java index a17368c..90e398d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ 
-84,7 +85,7 @@ public class TestReseekTo { Bytes.toBytes(value)); writer.append(kv); } else if (tagUsage == TagUsage.ONLY_TAG) { - Tag t = new Tag((byte) 1, "myTag1"); + Tag t = new ArrayBackedTag((byte) 1, "myTag1"); Tag[] tags = new Tag[1]; tags[0] = t; kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"), @@ -92,7 +93,7 @@ public class TestReseekTo { writer.append(kv); } else { if (key % 4 == 0) { - Tag t = new Tag((byte) 1, "myTag1"); + Tag t = new ArrayBackedTag((byte) 1, "myTag1"); Tag[] tags = new Tag[1]; tags[0] = t; kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index c1d91ec..6eead71 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -45,6 +45,8 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.OffheapKeyValue; import org.apache.hadoop.hbase.ShareableMemory; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -88,7 +90,7 @@ public class TestSeekTo { return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("qualifier"), Bytes.toBytes("value")); } else if (tagUsage == TagUsage.ONLY_TAG) { - Tag t = new Tag((byte) 1, "myTag1"); + Tag t = new ArrayBackedTag((byte) 1, "myTag1"); Tag[] tags = new Tag[1]; tags[0] = t; return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("qualifier"), @@ -100,7 +102,7 @@ public class TestSeekTo { Bytes.toBytes("qualifier"), 
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value")); } else { switchKVs = false; - Tag t = new Tag((byte) 1, "myTag1"); + Tag t = new ArrayBackedTag((byte) 1, "myTag1"); Tag[] tags = new Tag[1]; tags[0] = t; return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), @@ -174,11 +176,10 @@ public class TestSeekTo { assertEquals("i", toRowStr(scanner.getCell())); Cell cell = scanner.getCell(); if (tagUsage != TagUsage.NO_TAG && cell.getTagsLength() > 0) { - Iterator tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + Iterator tagsIterator = CellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { Tag next = tagsIterator.next(); - assertEquals("myTag1", Bytes.toString(next.getValue())); + assertEquals("myTag1", Bytes.toString(TagUtil.cloneValue(next))); } } assertTrue(scanner.seekBefore(toKV("k", tagUsage))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java index 47b6b5c..ef02431 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Scan; @@ -57,7 +58,6 @@ import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.monitoring.MonitoredTask; -import org.apache.hadoop.hbase.regionserver.StoreFile.Reader; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import 
org.apache.hadoop.hbase.regionserver.compactions.NoLimitCompactionThroughputController; import org.apache.hadoop.hbase.security.EncryptionUtil; @@ -190,7 +190,8 @@ public class TestHMobStore { String targetPathName = MobUtils.formatDate(currentDate); byte[] referenceValue = Bytes.toBytes(targetPathName + Path.SEPARATOR + mobFilePath.getName()); - Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName()); + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, + store.getTableName().getName()); KeyValue kv1 = new KeyValue(row, family, qf1, Long.MAX_VALUE, referenceValue); KeyValue kv2 = new KeyValue(row, family, qf2, Long.MAX_VALUE, referenceValue); KeyValue kv3 = new KeyValue(row2, family, qf3, Long.MAX_VALUE, referenceValue); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 35de488..9f778bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -96,7 +96,7 @@ import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.RegionTooBusyException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.Append; @@ -6317,16 +6317,16 @@ public class TestHRegion { long now = EnvironmentEdgeManager.currentTime(); // Add a cell that will expire in 5 seconds via cell TTL region.put(new Put(row).add(new KeyValue(row, fam1, q1, now, - HConstants.EMPTY_BYTE_ARRAY, new Tag[] { + HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] { // TTL tags specify ts in milliseconds - new Tag(TagType.TTL_TAG_TYPE, 
Bytes.toBytes(5000L)) } ))); + new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } ))); // Add a cell that will expire after 10 seconds via family setting region.put(new Put(row).addColumn(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY)); // Add a cell that will expire in 15 seconds via cell TTL region.put(new Put(row).add(new KeyValue(row, fam1, q3, now + 10000 - 1, - HConstants.EMPTY_BYTE_ARRAY, new Tag[] { + HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] { // TTL tags specify ts in milliseconds - new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } ))); + new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } ))); // Add a cell that will expire in 20 seconds via family setting region.put(new Put(row).addColumn(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java index 1bcb7c9..3c062f8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java @@ -31,9 +31,11 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileContext; @@ -86,7 +88,7 @@ public class 
TestStoreFileScannerWithTagCompression { kv.getRowLength())); List tags = KeyValueUtil.ensureKeyValue(kv).getTags(); assertEquals(1, tags.size()); - assertEquals("tag3", Bytes.toString(tags.get(0).getValue())); + assertEquals("tag3", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); } finally { s.close(); } @@ -97,9 +99,9 @@ public class TestStoreFileScannerWithTagCompression { byte[] qualifier = Bytes.toBytes("q"); long now = System.currentTimeMillis(); byte[] b = Bytes.toBytes("k1"); - Tag t1 = new Tag((byte) 1, "tag1"); - Tag t2 = new Tag((byte) 2, "tag2"); - Tag t3 = new Tag((byte) 3, "tag3"); + Tag t1 = new ArrayBackedTag((byte) 1, "tag1"); + Tag t2 = new ArrayBackedTag((byte) 2, "tag2"); + Tag t3 = new ArrayBackedTag((byte) 3, "tag3"); try { writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t1 })); b = Bytes.toBytes("k3"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index a85e479..0f7f23a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Durability; @@ -325,7 +327,7 @@ public class TestTags { if (CellUtil.matchingRow(current, row)) { assertEquals(1, TestCoprocessorForTags.tags.size()); Tag tag = TestCoprocessorForTags.tags.get(0); - assertEquals(bigTagLen, tag.getTagLength()); + assertEquals(bigTagLen, tag.getValueLength()); } else { assertEquals(0, TestCoprocessorForTags.tags.size()); } @@ -350,7 +352,7 
@@ public class TestTags { if (CellUtil.matchingRow(current, row)) { assertEquals(1, TestCoprocessorForTags.tags.size()); Tag tag = TestCoprocessorForTags.tags.get(0); - assertEquals(bigTagLen, tag.getTagLength()); + assertEquals(bigTagLen, tag.getValueLength()); } else { assertEquals(0, TestCoprocessorForTags.tags.size()); } @@ -403,7 +405,7 @@ public class TestTags { List tags = TestCoprocessorForTags.tags; assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(1, tags.size()); - assertEquals("tag1", Bytes.toString(tags.get(0).getValue())); + assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -421,7 +423,7 @@ public class TestTags { // We cannot assume the ordering of tags List tagValues = new ArrayList(); for (Tag tag: tags) { - tagValues.add(Bytes.toString(tag.getValue())); + tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); } assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag2")); @@ -445,7 +447,7 @@ public class TestTags { tags = TestCoprocessorForTags.tags; assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(1, tags.size()); - assertEquals("tag2", Bytes.toString(tags.get(0).getValue())); + assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -466,7 +468,7 @@ public class TestTags { kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); tags = TestCoprocessorForTags.tags; assertEquals(1, tags.size()); - assertEquals("tag1", Bytes.toString(tags.get(0).getValue())); + assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -483,7 +485,7 @@ public class TestTags { // We cannot assume the ordering of tags 
tagValues.clear(); for (Tag tag: tags) { - tagValues.add(Bytes.toString(tag.getValue())); + tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); } assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag2")); @@ -506,7 +508,7 @@ public class TestTags { kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); tags = TestCoprocessorForTags.tags; assertEquals(1, tags.size()); - assertEquals("tag2", Bytes.toString(tags.get(0).getValue())); + assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); } finally { TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.tags = null; @@ -569,7 +571,7 @@ public class TestTags { if (cf == null) { cf = CellUtil.cloneFamily(kv); } - Tag tag = new Tag((byte) 1, attribute); + Tag tag = new ArrayBackedTag((byte) 1, attribute); List tagList = new ArrayList(); tagList.add(tag); @@ -611,7 +613,7 @@ public class TestTags { CellScanner cellScanner = result.cellScanner(); if (cellScanner.advance()) { Cell cell = cellScanner.current(); - tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), + tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java index 0450904..104f897 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java @@ -24,9 +24,10 @@ import java.util.List; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.Tag; +import 
org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.io.util.LRUDictionary; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.DataOutputBuffer; @@ -108,7 +109,7 @@ public class TestKeyValueCompression { byte[] value = Bytes.toBytes("myValue"); List tags = new ArrayList(noOfTags); for (int i = 1; i <= noOfTags; i++) { - tags.add(new Tag((byte) i, Bytes.toBytes("tagValue" + i))); + tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i))); } return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java index 501fdda..e834ac8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java @@ -30,6 +30,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.codec.Codec.Decoder; import org.apache.hadoop.hbase.codec.Codec.Encoder; import org.apache.hadoop.hbase.io.util.LRUDictionary; @@ -69,7 +71,7 @@ public class TestWALCellCodecWithCompression { KeyValue kv = (KeyValue) decoder.current(); List tags = kv.getTags(); assertEquals(1, tags.size()); - assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue())); + assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); decoder.advance(); kv = (KeyValue) decoder.current(); tags = kv.getTags(); @@ -78,8 +80,8 @@ public class TestWALCellCodecWithCompression { kv = (KeyValue) decoder.current(); tags = kv.getTags(); 
assertEquals(2, tags.size()); - assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue())); - assertEquals("tagValue2", Bytes.toString(tags.get(1).getValue())); + assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); + assertEquals("tagValue2", Bytes.toString(TagUtil.cloneValue(tags.get(1)))); } private KeyValue createKV(int noOfTags) { @@ -89,7 +91,7 @@ public class TestWALCellCodecWithCompression { byte[] value = Bytes.toBytes("myValue"); List tags = new ArrayList(noOfTags); for (int i = 1; i <= noOfTags; i++) { - tags.add(new Tag((byte) i, Bytes.toBytes("tagValue" + i))); + tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i))); } return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java index 988373f..8bfdc2a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java @@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -209,7 +211,7 @@ public class TestReplicationWithTags { if (cf == null) { cf = CellUtil.cloneFamily(kv); } - Tag tag = new Tag(TAG_TYPE, attribute); + Tag tag = new ArrayBackedTag(TAG_TYPE, attribute); List tagList = new ArrayList(); tagList.add(tag); @@ -238,7 +240,7 @@ public class TestReplicationWithTags { // Check tag presence in the 1st cell in 1st Result if 
(!results.isEmpty()) { Cell cell = results.get(0); - tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 8ecc6e3..9f20c11 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Connection; @@ -2516,7 +2517,7 @@ public class TestAccessController extends SecureTestUtil { Table t = conn.getTable(TEST_TABLE);) { KeyValue kv = new KeyValue(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER, HConstants.LATEST_TIMESTAMP, HConstants.EMPTY_BYTE_ARRAY, - new Tag[] { new Tag(AccessControlLists.ACL_TAG_TYPE, + new Tag[] { new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, ProtobufUtil.toUsersAndPermissions(USER_OWNER.getShortName(), new Permission(Permission.Action.READ)).toByteArray()) }); t.put(new Put(TEST_ROW).add(kv)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index 104cb5b..e601af7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -40,7 +40,9 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -58,6 +60,7 @@ import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; /** @@ -73,7 +76,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer private static final byte[] DUMMY_VALUE = new byte[0]; private static final byte STRING_SERIALIZATION_FORMAT = 2; - private static final Tag STRING_SERIALIZATION_FORMAT_TAG = new Tag( + private static final Tag STRING_SERIALIZATION_FORMAT_TAG = new ArrayBackedTag( TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE, new byte[] { STRING_SERIALIZATION_FORMAT }); private final ExpressionParser expressionParser = new ExpressionParser(); @@ -281,28 +284,27 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer boolean visibilityTagPresent = false; // Save an object allocation where we can if (cell.getTagsLength() > 0) { - Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), - cell.getTagsLength()); + Iterator tagsItr = CellUtil.tagsIterator(cell); while (tagsItr.hasNext()) { boolean 
includeKV = true; Tag tag = tagsItr.next(); if (tag.getType() == VISIBILITY_TAG_TYPE) { visibilityTagPresent = true; - int offset = tag.getTagOffset(); - int endOffset = offset + tag.getTagLength(); + int offset = tag.getValueOffset(); + int endOffset = offset + tag.getValueLength(); while (offset < endOffset) { - short len = Bytes.toShort(tag.getBuffer(), offset); + short len = getTagValuePartAsShort(tag, offset); offset += 2; if (len < 0) { // This is a NOT label. len = (short) (-1 * len); - String label = Bytes.toString(tag.getBuffer(), offset, len); + String label = Bytes.toString(tag.getValueArray(), offset, len); if (authLabelsFinal.contains(label)) { includeKV = false; break; } } else { - String label = Bytes.toString(tag.getBuffer(), offset, len); + String label = Bytes.toString(tag.getValueArray(), offset, len); if (!authLabelsFinal.contains(label)) { includeKV = false; break; @@ -353,7 +355,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer dos.writeShort(bLabel.length); dos.write(bLabel); } - return new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray()); + return new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()); } private void extractLabels(ExpressionNode node, List labels, List notLabels) { @@ -423,8 +425,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer for (Tag tag : deleteVisTags) { matchFound = false; for (Tag givenTag : putVisTags) { - if (Bytes.equals(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength(), - givenTag.getBuffer(), givenTag.getTagOffset(), givenTag.getTagLength())) { + if (TagUtil.matchingValue(tag, givenTag)) { matchFound = true; break; } @@ -459,15 +460,15 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer visibilityString.append(VisibilityConstants.CLOSED_PARAN + VisibilityConstants.OR_OPERATOR); } - int offset = tag.getTagOffset(); - int endOffset = offset + tag.getTagLength(); + int offset = tag.getValueOffset(); + int 
endOffset = offset + tag.getValueLength(); boolean expressionStart = true; while (offset < endOffset) { - short len = Bytes.toShort(tag.getBuffer(), offset); + short len = getTagValuePartAsShort(tag, offset); offset += 2; if (len < 0) { len = (short) (-1 * len); - String label = Bytes.toString(tag.getBuffer(), offset, len); + String label = getTagValuePartAsString(tag, offset, len); if (expressionStart) { visibilityString.append(VisibilityConstants.OPEN_PARAN + VisibilityConstants.NOT_OPERATOR + CellVisibility.quote(label)); @@ -476,7 +477,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer + VisibilityConstants.NOT_OPERATOR + CellVisibility.quote(label)); } } else { - String label = Bytes.toString(tag.getBuffer(), offset, len); + String label = getTagValuePartAsString(tag, offset, len); if (expressionStart) { visibilityString.append(VisibilityConstants.OPEN_PARAN + CellVisibility.quote(label)); } else { @@ -496,4 +497,20 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer } return null; } + + private static short getTagValuePartAsShort(Tag t, int offset) { + if (t.hasArray()) { + return Bytes.toShort(t.getValueArray(), offset); + } + return ByteBufferUtils.toShort(t.getValueByteBuffer(), offset); + } + + private static String getTagValuePartAsString(Tag t, int offset, int length) { + if (t.hasArray()) { + return Bytes.toString(t.getValueArray(), offset, length); + } + byte[] b = new byte[length]; + ByteBufferUtils.copyFromBufferToArray(b, t.getValueByteBuffer(), offset, 0, length); + return Bytes.toString(b); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java index fecff07..2140a5c 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -183,7 +184,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit boolean foundNonVisTag = false; for(Tag t : TestCoprocessorForTagsAtSink.tags) { if(t.getType() == NON_VIS_TAG_TYPE) { - assertEquals(TEMP, Bytes.toString(t.getValue())); + assertEquals(TEMP, Bytes.toString(TagUtil.cloneValue(t))); foundNonVisTag = true; break; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java index 8414813..b3b3b43 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java @@ -43,8 +43,10 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.TagRewriteCell; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import 
org.apache.hadoop.hbase.client.ConnectionFactory; @@ -284,11 +286,11 @@ public class TestVisibilityLabelsReplication { for (Cell cell : cells) { if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0, row.length))) { - List tags = Tag - .asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + List tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); for (Tag tag : tags) { if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { - assertEquals(visTag, Bytes.toString(tag.getValue())); + assertEquals(visTag, TagUtil.getValueAsString(tag)); tagFound = true; break; } @@ -330,7 +332,7 @@ public class TestVisibilityLabelsReplication { boolean foundNonVisTag = false; for (Tag t : TestCoprocessorForTagsAtSink.tags) { if (t.getType() == NON_VIS_TAG_TYPE) { - assertEquals(TEMP, Bytes.toString(t.getValue())); + assertEquals(TEMP, TagUtil.getValueAsString(t)); foundNonVisTag = true; break; } @@ -407,11 +409,11 @@ public class TestVisibilityLabelsReplication { if (cf == null) { cf = CellUtil.cloneFamily(kv); } - Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute); + Tag tag = new ArrayBackedTag((byte) NON_VIS_TAG_TYPE, attribute); List tagList = new ArrayList(); tagList.add(tag); tagList.addAll(kv.getTags()); - byte[] fromList = Tag.fromList(tagList); + byte[] fromList = TagUtil.fromList(tagList); TagRewriteCell newcell = new TagRewriteCell(kv, fromList); ((List) updatedCells).add(newcell); } @@ -433,7 +435,7 @@ public class TestVisibilityLabelsReplication { // Check tag presence in the 1st cell in 1st Result if (!results.isEmpty()) { Cell cell = results.get(0); - tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java index fdf4fd9..964d6ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java @@ -21,10 +21,13 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; +import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -98,14 +101,11 @@ public class HFileTestUtil { KeyValue kv = new KeyValue(key, family, qualifier, now, key); if (withTag) { // add a tag. Arbitrarily chose mob tag since we have a helper already. - Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, key); + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, key); kv = MobUtils.createMobRefKeyValue(kv, key, tableNameTag); // verify that the kv has the tag. 
- byte[] ta = kv.getTagsArray(); - int toff = kv.getTagsOffset(); - int tlen = kv.getTagsLength(); - Tag t = Tag.getTag(ta, toff, tlen, TagType.MOB_TABLE_NAME_TAG_TYPE); + Tag t = CellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE); if (t == null) { throw new IllegalStateException("Tag didn't stick to KV " + kv.toString()); } @@ -130,15 +130,12 @@ public class HFileTestUtil { ResultScanner s = table.getScanner(new Scan()); for (Result r : s) { for (Cell c : r.listCells()) { - byte[] ta = c.getTagsArray(); - int toff = c.getTagsOffset(); - int tlen = c.getTagsLength(); - Tag t = Tag.getTag(ta, toff, tlen, TagType.MOB_TABLE_NAME_TAG_TYPE); + Tag t = CellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE); if (t == null) { fail(c.toString() + " has null tag"); continue; } - byte[] tval = t.getValue(); + byte[] tval = TagUtil.cloneValue(t); assertArrayEquals(c.toString() + " has tag" + Bytes.toString(tval), r.getRow(), tval); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java index 70d6d9d..87cb070 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.MultiThreadedAction.DefaultDataGenerator; @@ -77,7 +78,7 @@ public class LoadTestDataGeneratorWithTags extends DefaultDataGenerator { minTagLength + random.nextInt(maxTagLength - minTagLength)); tags = new ArrayList(); for (int n = 0; n < numTags; n++) { - tags.add(new Tag((byte) 
127, tag)); + tags.add(new ArrayBackedTag((byte) 127, tag)); } Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(),