Index: hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java (working copy)
@@ -19,7 +19,13 @@
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.ByteString;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.master.RegionState;
@@ -34,12 +40,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.VersionedWritable;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
@@ -334,7 +335,7 @@
       ClusterStatusProtos.RegionState rs = rit.getValue().convert();
       RegionSpecifier.Builder spec =
           RegionSpecifier.newBuilder().setType(RegionSpecifierType.REGION_NAME);
-      spec.setValue(ByteString.copyFrom(Bytes.toBytes(rit.getKey())));
+      spec.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(rit.getKey())));
       RegionInTransition pbRIT =
           RegionInTransition.newBuilder().setSpec(spec.build()).setRegionState(rs).build();
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java (working copy)
@@ -19,8 +19,9 @@
 package org.apache.hadoop.hbase;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -1230,11 +1231,11 @@
    */
   public ColumnFamilySchema convert() {
     ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
-    builder.setName(ByteString.copyFrom(getName()));
+    builder.setName(ZeroCopyLiteralByteString.wrap(getName()));
     for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
       BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(ByteString.copyFrom(e.getKey().get()));
-      aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
+      aBuilder.setFirst(ZeroCopyLiteralByteString.wrap(e.getKey().get()));
+      aBuilder.setSecond(ZeroCopyLiteralByteString.wrap(e.getValue().get()));
       builder.addAttributes(aBuilder.build());
     }
     for (Map.Entry<String, String> e : this.configuration.entrySet()) {
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java (working copy)
@@ -46,8 +46,8 @@
 import org.apache.hadoop.hbase.util.PairOfSameType;
 import org.apache.hadoop.io.DataInputBuffer;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * HRegion information.
@@ -843,10 +843,10 @@
     builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable()));
     builder.setRegionId(info.getRegionId());
     if (info.getStartKey() != null) {
-      builder.setStartKey(ByteString.copyFrom(info.getStartKey()));
+      builder.setStartKey(ZeroCopyLiteralByteString.wrap(info.getStartKey()));
     }
     if (info.getEndKey() != null) {
-      builder.setEndKey(ByteString.copyFrom(info.getEndKey()));
+      builder.setEndKey(ZeroCopyLiteralByteString.wrap(info.getEndKey()));
     }
     builder.setOffline(info.isOffline());
     builder.setSplit(info.isSplit());
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java (working copy)
@@ -53,8 +53,8 @@
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.WritableComparable;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * HTableDescriptor contains the details about an HBase table such as the descriptors of
@@ -1435,8 +1435,8 @@
     builder.setTableName(ProtobufUtil.toProtoTableName(getTableName()));
     for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
       BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(ByteString.copyFrom(e.getKey().get()));
-      aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
+      aBuilder.setFirst(ZeroCopyLiteralByteString.wrap(e.getKey().get()));
+      aBuilder.setSecond(ZeroCopyLiteralByteString.wrap(e.getValue().get()));
       builder.addAttributes(aBuilder.build());
     }
     for (HColumnDescriptor hcd: getColumnFamilies()) {
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java (working copy)
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.executor.EventType;
@@ -103,10 +104,10 @@
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder().
         setHostName(sn.getHostname()).setPort(sn.getPort()).setStartCode(sn.getStartcode()).build();
     ZooKeeperProtos.RegionTransition.Builder builder = ZooKeeperProtos.RegionTransition.newBuilder().
-      setEventTypeCode(type.getCode()).setRegionName(ByteString.copyFrom(regionName)).
+      setEventTypeCode(type.getCode()).setRegionName(ZeroCopyLiteralByteString.wrap(regionName)).
        setServerName(pbsn);
     builder.setCreateTime(System.currentTimeMillis());
-    if (payload != null) builder.setPayload(ByteString.copyFrom(payload));
+    if (payload != null) builder.setPayload(ZeroCopyLiteralByteString.wrap(payload));
     return new RegionTransition(builder.build());
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java (working copy)
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * ColumnInterpreter for doing Aggregation's with BigDecimal columns. This class
@@ -121,7 +121,7 @@
 
   private BigDecimalMsg getProtoForType(BigDecimal t) {
     BigDecimalMsg.Builder builder = BigDecimalMsg.newBuilder();
-    return builder.setBigdecimalMsg(ByteString.copyFrom(Bytes.toBytes(t))).build();
+    return builder.setBigdecimalMsg(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(t))).build();
   }
 
   @Override
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java (working copy)
@@ -22,6 +22,7 @@
 import static org.apache.hadoop.hbase.HConstants.LAST_ROW;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
@@ -136,8 +137,8 @@
 
     if(userToken != null) {
       protoDT =
           SecureBulkLoadProtos.DelegationToken.newBuilder()
-            .setIdentifier(ByteString.copyFrom(userToken.getIdentifier()))
-            .setPassword(ByteString.copyFrom(userToken.getPassword()))
+            .setIdentifier(ZeroCopyLiteralByteString.wrap(userToken.getIdentifier()))
+            .setPassword(ZeroCopyLiteralByteString.wrap(userToken.getPassword()))
             .setKind(userToken.getKind().toString())
             .setService(userToken.getService().toString()).build();
     }
@@ -146,7 +147,7 @@
         new ArrayList<ClientProtos.BulkLoadHFileRequest.FamilyPath>();
     for(Pair<byte[], String> el: familyPaths) {
       protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
-          .setFamily(ByteString.copyFrom(el.getFirst()))
+          .setFamily(ZeroCopyLiteralByteString.wrap(el.getFirst()))
           .setPath(el.getSecond()).build());
     }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java (working copy)
@@ -18,14 +18,15 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.protobuf.ByteString;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 /** Base class for byte array comparators */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -53,7 +54,7 @@
   ComparatorProtos.ByteArrayComparable convert() {
     ComparatorProtos.ByteArrayComparable.Builder builder =
       ComparatorProtos.ByteArrayComparable.newBuilder();
-    if (value != null) builder.setValue(ByteString.copyFrom(value));
+    if (value != null) builder.setValue(ZeroCopyLiteralByteString.wrap(value));
     return builder.build();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (working copy)
@@ -18,9 +18,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -29,7 +28,9 @@
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.util.ArrayList;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset.
@@ -174,7 +175,7 @@
       builder.setOffset(this.offset);
     }
     if (this.columnOffset != null) {
-      builder.setColumnOffset(ByteString.copyFrom(this.columnOffset));
+      builder.setColumnOffset(ZeroCopyLiteralByteString.wrap(this.columnOffset));
     }
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java (working copy)
@@ -19,9 +19,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
-import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -30,7 +29,9 @@
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.util.ArrayList;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * This filter is used for selecting only those keys with columns that matches
@@ -94,7 +95,7 @@
   public byte [] toByteArray() {
     FilterProtos.ColumnPrefixFilter.Builder builder =
       FilterProtos.ColumnPrefixFilter.newBuilder();
-    if (this.prefix != null) builder.setPrefix(ByteString.copyFrom(this.prefix));
+    if (this.prefix != null) builder.setPrefix(ZeroCopyLiteralByteString.wrap(this.prefix));
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java (working copy)
@@ -22,8 +22,9 @@
 import static org.apache.hadoop.hbase.util.Bytes.len;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -172,9 +173,9 @@
   public byte [] toByteArray() {
     FilterProtos.ColumnRangeFilter.Builder builder =
       FilterProtos.ColumnRangeFilter.newBuilder();
-    if (this.minColumn != null) builder.setMinColumn(ByteString.copyFrom(this.minColumn));
+    if (this.minColumn != null) builder.setMinColumn(ZeroCopyLiteralByteString.wrap(this.minColumn));
     builder.setMinColumnInclusive(this.minColumnInclusive);
-    if (this.maxColumn != null) builder.setMaxColumn(ByteString.copyFrom(this.maxColumn));
+    if (this.maxColumn != null) builder.setMaxColumn(ZeroCopyLiteralByteString.wrap(this.maxColumn));
     builder.setMaxColumnInclusive(this.maxColumnInclusive);
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java (working copy)
@@ -36,8 +36,8 @@
 import org.apache.hadoop.hbase.util.Bytes;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * A filter for adding inter-column timestamp matching
@@ -225,10 +225,10 @@
       FilterProtos.DependentColumnFilter.newBuilder();
     builder.setCompareFilter(super.convert());
     if (this.columnFamily != null) {
-      builder.setColumnFamily(ByteString.copyFrom(this.columnFamily));
+      builder.setColumnFamily(ZeroCopyLiteralByteString.wrap(this.columnFamily));
     }
     if (this.columnQualifier != null) {
-      builder.setColumnQualifier(ByteString.copyFrom(this.columnQualifier));
+      builder.setColumnQualifier(ZeroCopyLiteralByteString.wrap(this.columnQualifier));
     }
     builder.setDropDependentColumn(this.dropDependentColumn);
     return builder.build().toByteArray();
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java (working copy)
@@ -20,6 +20,8 @@
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -87,7 +89,7 @@
     FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
       FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
     for (byte[] qualifier : qualifiers) {
-      if (qualifier != null) builder.addQualifiers(ByteString.copyFrom(qualifier));
+      if (qualifier != null) builder.addQualifiers(ZeroCopyLiteralByteString.wrap(qualifier));
     }
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java (working copy)
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -146,8 +147,8 @@
       FilterProtos.FuzzyRowFilter.newBuilder();
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
       BytesBytesPair.Builder bbpBuilder = BytesBytesPair.newBuilder();
-      bbpBuilder.setFirst(ByteString.copyFrom(fuzzyData.getFirst()));
-      bbpBuilder.setSecond(ByteString.copyFrom(fuzzyData.getSecond()));
+      bbpBuilder.setFirst(ZeroCopyLiteralByteString.wrap(fuzzyData.getFirst()));
+      bbpBuilder.setSecond(ZeroCopyLiteralByteString.wrap(fuzzyData.getSecond()));
       builder.addFuzzyKeysData(bbpBuilder);
     }
     return builder.build().toByteArray();
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (working copy)
@@ -19,16 +19,17 @@
 
 package org.apache.hadoop.hbase.filter;
 
-import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.util.ArrayList;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * A Filter that stops after the given row. There is no "RowStopFilter" because
@@ -85,7 +86,7 @@
   public byte [] toByteArray() {
     FilterProtos.InclusiveStopFilter.Builder builder =
       FilterProtos.InclusiveStopFilter.newBuilder();
-    if (this.stopRowKey != null) builder.setStopRowKey(ByteString.copyFrom(this.stopRowKey));
+    if (this.stopRowKey != null) builder.setStopRowKey(ZeroCopyLiteralByteString.wrap(this.stopRowKey));
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java (working copy)
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.protobuf.ByteString;
+
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -112,7 +114,7 @@
     FilterProtos.MultipleColumnPrefixFilter.Builder builder =
       FilterProtos.MultipleColumnPrefixFilter.newBuilder();
     for (byte [] element : sortedPrefixes) {
-      if (element != null) builder.addSortedPrefixes(ByteString.copyFrom(element));
+      if (element != null) builder.addSortedPrefixes(ZeroCopyLiteralByteString.wrap(element));
     }
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java (working copy)
@@ -20,8 +20,9 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -90,7 +91,7 @@
   public byte [] toByteArray() {
     FilterProtos.PrefixFilter.Builder builder =
       FilterProtos.PrefixFilter.newBuilder();
-    if (this.prefix != null) builder.setPrefix(ByteString.copyFrom(this.prefix));
+    if (this.prefix != null) builder.setPrefix(ZeroCopyLiteralByteString.wrap(this.prefix));
     return builder.build().toByteArray();
   }
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java (working copy)
@@ -19,9 +19,9 @@
 
 package org.apache.hadoop.hbase.filter;
 
-import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import java.io.IOException;
+import java.util.ArrayList;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -38,8 +38,9 @@
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import java.io.IOException;
-import java.util.ArrayList;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp}
@@ -306,10 +307,10 @@
     FilterProtos.SingleColumnValueFilter.Builder builder =
       FilterProtos.SingleColumnValueFilter.newBuilder();
     if (this.columnFamily != null) {
-      builder.setColumnFamily(ByteString.copyFrom(this.columnFamily));
+      builder.setColumnFamily(ZeroCopyLiteralByteString.wrap(this.columnFamily));
     }
     if (this.columnQualifier != null) {
-      builder.setColumnQualifier(ByteString.copyFrom(this.columnQualifier));
+      builder.setColumnQualifier(ZeroCopyLiteralByteString.wrap(this.columnQualifier));
     }
     HBaseProtos.CompareType compareOp = CompareType.valueOf(this.compareOp.name());
     builder.setCompareOp(compareOp);
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java (working copy)
@@ -18,9 +18,8 @@
 
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -30,7 +29,9 @@
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
 
-import java.io.IOException;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s
@@ -60,7 +61,7 @@
     final ClientProtos.CoprocessorServiceCall call =
         ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(ByteString.copyFrom(HConstants.EMPTY_BYTE_ARRAY))
+            .setRow(ZeroCopyLiteralByteString.wrap(HConstants.EMPTY_BYTE_ARRAY))
             .setServiceName(method.getService().getFullName())
             .setMethodName(method.getName())
             .setRequest(request.toByteString()).build();
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java (working copy)
@@ -32,9 +32,9 @@
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s
@@ -76,7 +76,7 @@
     final ClientProtos.CoprocessorServiceCall call =
         ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(ByteString.copyFrom(row))
+            .setRow(ZeroCopyLiteralByteString.wrap(row))
            .setServiceName(method.getService().getFullName())
            .setMethodName(method.getName())
            .setRequest(request.toByteString()).build();
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (working copy)
@@ -138,6 +138,7 @@
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
 import com.google.protobuf.TextFormat;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Protobufs utility.
@@ -758,17 +759,17 @@
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue()));
+        attributeBuilder.setValue(ZeroCopyLiteralByteString.wrap(attribute.getValue()));
         scanBuilder.addAttribute(attributeBuilder.build());
       }
     }
     byte[] startRow = scan.getStartRow();
     if (startRow != null && startRow.length > 0) {
-      scanBuilder.setStartRow(ByteString.copyFrom(startRow));
+      scanBuilder.setStartRow(ZeroCopyLiteralByteString.wrap(startRow));
     }
     byte[] stopRow = scan.getStopRow();
     if (stopRow != null && stopRow.length > 0) {
-      scanBuilder.setStopRow(ByteString.copyFrom(stopRow));
+      scanBuilder.setStopRow(ZeroCopyLiteralByteString.wrap(stopRow));
     }
     if (scan.hasFilter()) {
       scanBuilder.setFilter(ProtobufUtil.toFilter(scan.getFilter()));
@@ -777,12 +778,12 @@
       Column.Builder columnBuilder = Column.newBuilder();
       for (Map.Entry<byte[], NavigableSet<byte[]>> family: scan.getFamilyMap().entrySet()) {
-        columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
+        columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family.getKey()));
         NavigableSet<byte[]> qualifiers = family.getValue();
         columnBuilder.clearQualifier();
         if (qualifiers != null && qualifiers.size() > 0) {
           for (byte [] qualifier: qualifiers) {
-            columnBuilder.addQualifier(ByteString.copyFrom(qualifier));
+            columnBuilder.addQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
           }
         }
         scanBuilder.addColumn(columnBuilder.build());
@@ -884,7 +885,7 @@
       final Get get) throws IOException {
     ClientProtos.Get.Builder builder =
       ClientProtos.Get.newBuilder();
-    builder.setRow(ByteString.copyFrom(get.getRow()));
+    builder.setRow(ZeroCopyLiteralByteString.wrap(get.getRow()));
     builder.setCacheBlocks(get.getCacheBlocks());
     builder.setMaxVersions(get.getMaxVersions());
     if (get.getFilter() != null) {
@@ -903,7 +904,7 @@
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue()));
+        attributeBuilder.setValue(ZeroCopyLiteralByteString.wrap(attribute.getValue()));
         builder.addAttribute(attributeBuilder.build());
       }
     }
@@ -912,11 +913,11 @@
       Map<byte[], NavigableSet<byte[]>> families = get.getFamilyMap();
       for (Map.Entry<byte[], NavigableSet<byte[]>> family: families.entrySet()) {
         NavigableSet<byte[]> qualifiers = family.getValue();
-        columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
+        columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family.getKey()));
         columnBuilder.clearQualifier();
         if (qualifiers != null && qualifiers.size() > 0) {
           for (byte[] qualifier: qualifiers) {
-            columnBuilder.addQualifier(ByteString.copyFrom(qualifier));
+            columnBuilder.addQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
           }
         }
         builder.addColumn(columnBuilder.build());
@@ -945,7 +946,7 @@
    */
   public static MutationProto toMutation(final Increment increment) {
     MutationProto.Builder builder = MutationProto.newBuilder();
-    builder.setRow(ByteString.copyFrom(increment.getRow()));
+    builder.setRow(ZeroCopyLiteralByteString.wrap(increment.getRow()));
     builder.setMutateType(MutationType.INCREMENT);
     builder.setDurability(toDurability(increment.getDurability()));
     TimeRange timeRange = increment.getTimeRange();
@@ -959,14 +960,14 @@
     ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
     for (Map.Entry<byte[], List<Cell>> family: increment.getFamilyCellMap().entrySet()) {
-      columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
+      columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family.getKey()));
       columnBuilder.clearQualifierValue();
       List<Cell> values = family.getValue();
       if (values != null && values.size() > 0) {
         for (Cell cell: values) {
           KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-          valueBuilder.setQualifier(ByteString.copyFrom(kv.getQualifier()));
-          valueBuilder.setValue(ByteString.copyFrom(kv.getValue()));
+          valueBuilder.setQualifier(ZeroCopyLiteralByteString.wrap(kv.getQualifier()));
+          valueBuilder.setValue(ZeroCopyLiteralByteString.wrap(kv.getValue()));
           columnBuilder.addQualifierValue(valueBuilder.build());
         }
       }
@@ -989,12 +990,12 @@
     ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
     for (Map.Entry<byte[], List<Cell>> family: mutation.getFamilyCellMap().entrySet()) {
-      columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
+      columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family.getKey()));
       columnBuilder.clearQualifierValue();
       for (Cell cell: family.getValue()) {
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        valueBuilder.setQualifier(ByteString.copyFrom(kv.getQualifier()));
-        valueBuilder.setValue(ByteString.copyFrom(kv.getValue()));
+        valueBuilder.setQualifier(ZeroCopyLiteralByteString.wrap(kv.getQualifier()));
+        valueBuilder.setValue(ZeroCopyLiteralByteString.wrap(kv.getValue()));
         valueBuilder.setTimestamp(kv.getTimestamp());
         if(cell.getTagsLength() > 0) {
           valueBuilder.setTags(ByteString.copyFrom(CellUtil.getTagArray(kv)));
@@ -1035,7 +1036,7 @@
   private static MutationProto.Builder getMutationBuilderAndSetCommonFields(final MutationType type,
       final Mutation mutation) {
     MutationProto.Builder builder = MutationProto.newBuilder();
-    builder.setRow(ByteString.copyFrom(mutation.getRow()));
+    builder.setRow(ZeroCopyLiteralByteString.wrap(mutation.getRow()));
     builder.setMutateType(type);
     builder.setDurability(toDurability(mutation.getDurability()));
     builder.setTimestamp(mutation.getTimeStamp());
@@ -1044,7 +1045,7 @@
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(ByteString.copyFrom(attribute.getValue()));
+        attributeBuilder.setValue(ZeroCopyLiteralByteString.wrap(attribute.getValue()));
         builder.addAttribute(attributeBuilder.build());
       }
     }
@@ -1159,7 +1160,7 @@
   public static ComparatorProtos.Comparator toComparator(ByteArrayComparable comparator) {
     ComparatorProtos.Comparator.Builder builder = ComparatorProtos.Comparator.newBuilder();
     builder.setName(comparator.getClass().getName());
-    builder.setSerializedComparator(ByteString.copyFrom(comparator.toByteArray()));
+    builder.setSerializedComparator(ZeroCopyLiteralByteString.wrap(comparator.toByteArray()));
     return builder.build();
   }
@@ -1221,7 +1222,7 @@
   public static FilterProtos.Filter toFilter(Filter filter) throws IOException {
     FilterProtos.Filter.Builder builder = FilterProtos.Filter.newBuilder();
     builder.setName(filter.getClass().getName());
-    builder.setSerializedFilter(ByteString.copyFrom(filter.toByteArray()));
+    builder.setSerializedFilter(ZeroCopyLiteralByteString.wrap(filter.toByteArray()));
     return builder.build();
   }
@@ -1706,10 +1707,10 @@
         AccessControlProtos.TablePermission.newBuilder();
     builder.setTableName(ProtobufUtil.toProtoTableName(tablePerm.getTableName()));
     if (tablePerm.hasFamily()) {
-      builder.setFamily(ByteString.copyFrom(tablePerm.getFamily()));
+      builder.setFamily(ZeroCopyLiteralByteString.wrap(tablePerm.getFamily()));
     }
     if (tablePerm.hasQualifier()) {
-      builder.setQualifier(ByteString.copyFrom(tablePerm.getQualifier()));
+      builder.setQualifier(ZeroCopyLiteralByteString.wrap(tablePerm.getQualifier()));
     }
     for (Permission.Action a : perm.getActions()) {
       builder.addAction(toPermissionAction(a));
@@ -1798,7 +1799,7 @@
    */
   public static AccessControlProtos.UserPermission toUserPermission(UserPermission perm) {
     return AccessControlProtos.UserPermission.newBuilder()
-        .setUser(ByteString.copyFrom(perm.getUser()))
+        .setUser(ZeroCopyLiteralByteString.wrap(perm.getUser()))
         .setPermission(toPermission(perm))
         .build();
   }
@@ -2070,8 +2071,8 @@
    */
   public static AuthenticationProtos.Token toToken(Token<AuthenticationTokenIdentifier> token) {
     AuthenticationProtos.Token.Builder builder = AuthenticationProtos.Token.newBuilder();
-    builder.setIdentifier(ByteString.copyFrom(token.getIdentifier()));
-    builder.setPassword(ByteString.copyFrom(token.getPassword()));
+    builder.setIdentifier(ZeroCopyLiteralByteString.wrap(token.getIdentifier()));
+    builder.setPassword(ZeroCopyLiteralByteString.wrap(token.getPassword()));
     if (token.getService() != null) {
       builder.setService(ByteString.copyFromUtf8(token.getService().toString()));
     }
@@ -2254,9 +2255,9 @@
     // input / output paths are relative to the store dir
     // store dir is relative to region dir
     CompactionDescriptor.Builder builder = CompactionDescriptor.newBuilder()
-        .setTableName(ByteString.copyFrom(info.getTableName()))
-        .setEncodedRegionName(ByteString.copyFrom(info.getEncodedNameAsBytes()))
-        .setFamilyName(ByteString.copyFrom(family))
+        .setTableName(ZeroCopyLiteralByteString.wrap(info.getTableName()))
+        .setEncodedRegionName(ZeroCopyLiteralByteString.wrap(info.getEncodedNameAsBytes()))
+        .setFamilyName(ZeroCopyLiteralByteString.wrap(family))
         .setStoreHomeDir(storeDir.getName()); //make relative
     for (Path inputPath : inputPaths) {
       builder.addCompactionInput(inputPath.getName()); //relative path
@@ -2333,8 +2334,8 @@
   public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
     return HBaseProtos.TableName.newBuilder()
-        .setNamespace(ByteString.copyFrom(tableName.getNamespace()))
-        .setQualifier(ByteString.copyFrom(tableName.getQualifier())).build();
+        .setNamespace(ZeroCopyLiteralByteString.wrap(tableName.getNamespace()))
+        .setQualifier(ZeroCopyLiteralByteString.wrap(tableName.getQualifier())).build();
   }
 
   public static TableName[] getTableNameArray(List<HBaseProtos.TableName> tableNamesList) {
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java (working copy)
@@ -22,15 +22,16 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellScannable;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Action;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
@@ -38,7 +39,6 @@
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -99,9 +99,9 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Triple;
-import org.mortbay.log.Log;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Helper utility to build protocol buffer requests,
@@ -133,10 +133,10 @@
     builder.setRegion(region);
 
     Column.Builder columnBuilder = Column.newBuilder();
-    columnBuilder.setFamily(ByteString.copyFrom(family));
+    columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family));
     ClientProtos.Get.Builder getBuilder =
       ClientProtos.Get.newBuilder();
-    getBuilder.setRow(ByteString.copyFrom(row));
+    getBuilder.setRow(ZeroCopyLiteralByteString.wrap(row));
     getBuilder.addColumn(columnBuilder.build());
     getBuilder.setClosestRowBefore(true);
     builder.setGet(getBuilder.build());
@@ -181,14 +181,14 @@
     builder.setRegion(region);
 
     MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
-    mutateBuilder.setRow(ByteString.copyFrom(row));
+    mutateBuilder.setRow(ZeroCopyLiteralByteString.wrap(row));
     mutateBuilder.setMutateType(MutationType.INCREMENT);
     mutateBuilder.setDurability(ProtobufUtil.toDurability(durability));
     ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
-    columnBuilder.setFamily(ByteString.copyFrom(family));
+    columnBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family));
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
-    valueBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(amount)));
-    valueBuilder.setQualifier(ByteString.copyFrom(qualifier));
+    valueBuilder.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(amount)));
+    valueBuilder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
     columnBuilder.addQualifierValue(valueBuilder.build());
     mutateBuilder.addColumnValue(columnBuilder.build());
     builder.setMutation(mutateBuilder.build());
@@ -466,7 +466,7 @@
     builder.setRegion(region);
     FamilyPath.Builder familyPathBuilder = FamilyPath.newBuilder();
     for (Pair<byte[], String> familyPath: familyPaths) {
-      familyPathBuilder.setFamily(ByteString.copyFrom(familyPath.getFirst()));
+      familyPathBuilder.setFamily(ZeroCopyLiteralByteString.wrap(familyPath.getFirst()));
       familyPathBuilder.setPath(familyPath.getSecond());
      builder.addFamilyPath(familyPathBuilder.build());
     }
@@ -629,7 +629,7 @@
     RegionSpecifier region = buildRegionSpecifier(
       RegionSpecifierType.REGION_NAME, regionName);
     builder.setRegion(region);
-    builder.addFamily(ByteString.copyFrom(family));
+    builder.addFamily(ZeroCopyLiteralByteString.wrap(family));
     return builder.build();
   }
@@ -775,7 +775,7 @@
       RegionSpecifierType.REGION_NAME, regionName);
     builder.setRegion(region);
     if (splitPoint != null) {
-      builder.setSplitPoint(ByteString.copyFrom(splitPoint));
+      builder.setSplitPoint(ZeroCopyLiteralByteString.wrap(splitPoint));
     }
     return builder.build();
   }
@@ -815,7 +815,7 @@
     builder.setRegion(region);
     builder.setMajor(major);
     if (family != null) {
-      builder.setFamily(ByteString.copyFrom(family));
+      builder.setFamily(ZeroCopyLiteralByteString.wrap(family));
     }
     return builder.build();
   }
@@ -874,7 +874,7 @@
   public static RegionSpecifier buildRegionSpecifier(
       final RegionSpecifierType type, final byte[] value) {
     RegionSpecifier.Builder regionBuilder = RegionSpecifier.newBuilder();
-    regionBuilder.setValue(ByteString.copyFrom(value));
+    regionBuilder.setValue(ZeroCopyLiteralByteString.wrap(value));
     regionBuilder.setType(type);
     return regionBuilder.build();
   }
@@ -895,9 +895,9 @@
       final ByteArrayComparable comparator, final CompareType compareType) throws IOException {
     Condition.Builder builder = Condition.newBuilder();
-    builder.setRow(ByteString.copyFrom(row));
-    builder.setFamily(ByteString.copyFrom(family));
-    builder.setQualifier(ByteString.copyFrom(qualifier));
+    builder.setRow(ZeroCopyLiteralByteString.wrap(row));
+    builder.setFamily(ZeroCopyLiteralByteString.wrap(family));
+    builder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
     builder.setComparator(ProtobufUtil.toComparator(comparator));
     builder.setCompareType(compareType);
     return builder.build();
@@ -929,7 +929,7 @@
       final TableName tableName, final byte [] columnName) {
     DeleteColumnRequest.Builder builder = DeleteColumnRequest.newBuilder();
     builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
-    builder.setColumnName(ByteString.copyFrom(columnName));
+    builder.setColumnName(ZeroCopyLiteralByteString.wrap(columnName));
     return builder.build();
   }
@@ -1069,7 +1069,7 @@
     builder.setTableSchema(hTableDesc.convert());
     if (splitKeys != null) {
       for (byte [] splitKey : splitKeys) {
-        builder.addSplitKeys(ByteString.copyFrom(splitKey));
+        builder.addSplitKeys(ZeroCopyLiteralByteString.wrap(splitKey));
       }
     }
     return builder.build();
@@ -1222,7 +1222,7 @@
   public static GetLastFlushedSequenceIdRequest buildGetLastFlushedSequenceIdRequest(
       byte[] regionName) {
     return GetLastFlushedSequenceIdRequest.newBuilder().setRegionName(
-        ByteString.copyFrom(regionName)).build();
+        ZeroCopyLiteralByteString.wrap(regionName)).build();
   }
 
 /**
@@ -1277,10 +1277,10 @@
       permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
 
       if (family != null) {
-        permissionBuilder.setFamily(ByteString.copyFrom(family));
+        permissionBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family));
       }
       if (qualifier != null) {
-        permissionBuilder.setQualifier(ByteString.copyFrom(qualifier));
+        permissionBuilder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
       }
       ret.setType(AccessControlProtos.Permission.Type.Table)
         .setTablePermission(permissionBuilder);
@@ -1373,10 +1373,10 @@
       permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
     }
     if (family != null) {
-      permissionBuilder.setFamily(ByteString.copyFrom(family));
+      permissionBuilder.setFamily(ZeroCopyLiteralByteString.wrap(family));
     }
     if (qualifier != null) {
-      permissionBuilder.setQualifier(ByteString.copyFrom(qualifier));
+      permissionBuilder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
     }
     ret.setType(AccessControlProtos.Permission.Type.Table)
       .setTablePermission(permissionBuilder);
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java (revision 1537632)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java (working copy)
@@ -69,8 +69,8 @@
 import org.apache.zookeeper.proto.SetDataRequest;
 import org.apache.zookeeper.server.ZooKeeperSaslServer;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Internal HBase utility class for ZooKeeper.
@@ -1946,7 +1946,7 @@
     if (storeSequenceIds != null) {
       for (byte[] columnFamilyName : storeSequenceIds.keySet()) {
         Long curSeqId = storeSequenceIds.get(columnFamilyName);
-        storeSequenceIdBuilder.setFamilyName(ByteString.copyFrom(columnFamilyName));
+        storeSequenceIdBuilder.setFamilyName(ZeroCopyLiteralByteString.wrap(columnFamilyName));
         storeSequenceIdBuilder.setSequenceId(curSeqId);
         regionSequenceIdsBuilder.addStoreSequenceId(storeSequenceIdBuilder.build());
         storeSequenceIdBuilder.clear();
Index: hbase-protocol/src/main/java/com/google/protobuf/ZeroCopyLiteralByteString.java
===================================================================
--- hbase-protocol/src/main/java/com/google/protobuf/ZeroCopyLiteralByteString.java (revision 0)
+++ hbase-protocol/src/main/java/com/google/protobuf/ZeroCopyLiteralByteString.java (working copy)
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.protobuf;  // This is a lie.
+
+/**
+ * Helper class to extract byte arrays from {@link ByteString} without copy.
+ * <p>
+ * Without this, protobufs would force us to copy every single byte array out
+ * of the objects de-serialized from the wire (which already do one copy, on
+ * top of the copies the JVM does to go from kernel buffer to C buffer and
+ * from C buffer to JVM buffer).
+ *
+ * @since 0.96.1
+ */
+public final class ZeroCopyLiteralByteString extends LiteralByteString {
+  // Gotten from AsyncHBase code base with permission.
+  /** Private constructor so this class cannot be instantiated. */
+  private ZeroCopyLiteralByteString() {
+    super(null);
+    throw new UnsupportedOperationException("Should never be here.");
+  }
+
+  /**
+   * Wraps a byte array in a {@link ByteString} without copying it.
+   */
+  public static ByteString wrap(final byte[] array) {
+    return new LiteralByteString(array);
+  }
+
+  // TODO:
+  // ZeroCopyLiteralByteString.wrap(this.buf, 0, this.count);
+
+  /**
+   * Extracts the byte array from the given {@link ByteString} without copy.
+   * @param buf A buffer from which to extract the array. This buffer must
+   *     actually be an instance of a {@code LiteralByteString}.
+   */
+  public static byte[] zeroCopyGetBytes(final LiteralByteString buf) {
+    return buf.bytes;
+  }
+}
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java (revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java (working copy)
@@ -24,13 +24,9 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.codec.BaseDecoder;
-import org.apache.hadoop.hbase.codec.BaseEncoder;
-import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 
 import com.google.protobuf.ByteString;
-import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * Codec that just writes out Cell as a protobuf Cell Message.  Does not write the mvcc stamp.
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java (revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java (working copy)
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * A reference to the top or bottom half of a store file where 'bottom' is the first half
@@ -194,7 +195,7 @@
     FSProtos.Reference.Builder builder = FSProtos.Reference.newBuilder();
     builder.setRange(isTopFileRegion(getFileRegion())?
FSProtos.Reference.Range.TOP: FSProtos.Reference.Range.BOTTOM); - builder.setSplitkey(ByteString.copyFrom(getSplitKey())); + builder.setSplitkey(ZeroCopyLiteralByteString.wrap(getSplitKey())); return builder.build(); } Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (revision 1537632) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (working copy) @@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.Writable; +import com.google.protobuf.ZeroCopyLiteralByteString; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -191,18 +192,18 @@ static final AtomicLong checksumFailures = new AtomicLong(); // For getting more detailed stats on FS latencies - // If, for some reason, the metrics subsystem stops polling for latencies, + // If, for some reason, the metrics subsystem stops polling for latencies, // I don't want data to pile up in a memory leak // so, after LATENCY_BUFFER_SIZE items have been enqueued for processing, // fs latency stats will be dropped (and this behavior will be logged) private static final int LATENCY_BUFFER_SIZE = 5000; - private static final BlockingQueue fsReadLatenciesNanos = + private static final BlockingQueue fsReadLatenciesNanos = new ArrayBlockingQueue(LATENCY_BUFFER_SIZE); - private static final BlockingQueue fsWriteLatenciesNanos = + private static final BlockingQueue fsWriteLatenciesNanos = new ArrayBlockingQueue(LATENCY_BUFFER_SIZE); - private static final BlockingQueue fsPreadLatenciesNanos = + private static final BlockingQueue fsPreadLatenciesNanos = new ArrayBlockingQueue(LATENCY_BUFFER_SIZE); - + public static final void offerReadLatency(long latencyNanos, boolean pread) { if (pread) { fsPreadLatenciesNanos.offer(latencyNanos); // might be silently dropped, if the queue is full @@ -214,30 +215,30 @@ readOps.incrementAndGet(); } } - + public static final void offerWriteLatency(long latencyNanos) { fsWriteLatenciesNanos.offer(latencyNanos); // might be silently dropped, if the queue is full - + writeTimeNano.addAndGet(latencyNanos); writeOps.incrementAndGet(); } - + public static final Collection getReadLatenciesNanos() { - final List latencies = + final List latencies = Lists.newArrayListWithCapacity(fsReadLatenciesNanos.size()); fsReadLatenciesNanos.drainTo(latencies); return latencies; } public static final Collection getPreadLatenciesNanos() { - final List latencies = + final List latencies = Lists.newArrayListWithCapacity(fsPreadLatenciesNanos.size()); fsPreadLatenciesNanos.drainTo(latencies); return latencies; } - + public static final Collection getWriteLatenciesNanos() { - final List latencies = + final List latencies = Lists.newArrayListWithCapacity(fsWriteLatenciesNanos.size()); fsWriteLatenciesNanos.drainTo(latencies); return latencies; @@ -380,7 +381,7 @@ if (path != null) { ostream = AbstractHFileWriter.createOutputStream(conf, fs, path, favoredNodes); } - return createWriter(fs, path, ostream, + return createWriter(fs, path, ostream, comparator, fileContext); } @@ -743,8 +744,8 @@ HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder(); for (Map.Entry e: this.map.entrySet()) { HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder(); - 
bbpBuilder.setFirst(ByteString.copyFrom(e.getKey())); - bbpBuilder.setSecond(ByteString.copyFrom(e.getValue())); + bbpBuilder.setFirst(ZeroCopyLiteralByteString.wrap(e.getKey())); + bbpBuilder.setSecond(ZeroCopyLiteralByteString.wrap(e.getValue())); builder.addMapEntry(bbpBuilder.build()); } out.write(ProtobufUtil.PB_MAGIC); @@ -786,7 +787,7 @@ /** Now parse the old Writable format. It was a list of Map entries. Each map entry was a key and a value of * a byte []. The old map format had a byte before each entry that held a code which was short for the key or * value type. We know it was a byte [] so in below we just read and dump it. - * @throws IOException + * @throws IOException */ void parseWritable(final DataInputStream in) throws IOException { // First clear the map. Otherwise we will just accumulate entries every time this method is called. Index: hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java (revision 1537632) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java (working copy) @@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Pair; -import com.google.protobuf.ByteString; import com.google.protobuf.ServiceException; +import com.google.protobuf.ZeroCopyLiteralByteString; @InterfaceAudience.Private public class ReplicationProtbufUtil { @@ -89,8 +89,8 @@ WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder(); HLogKey key = entry.getKey(); keyBuilder.setEncodedRegionName( - ByteString.copyFrom(key.getEncodedRegionName())); - keyBuilder.setTableName(ByteString.copyFrom(key.getTablename().getName())); + ZeroCopyLiteralByteString.wrap(key.getEncodedRegionName())); + keyBuilder.setTableName(ZeroCopyLiteralByteString.wrap(key.getTablename().getName())); keyBuilder.setLogSequenceNumber(key.getLogSeqNum()); keyBuilder.setWriteTime(key.getWriteTime()); for(UUID clusterId : key.getClusterIds()) { @@ -102,7 +102,7 @@ NavigableMap scopes = key.getScopes(); if (scopes != null && !scopes.isEmpty()) { for (Map.Entry scope: scopes.entrySet()) { - scopeBuilder.setFamily(ByteString.copyFrom(scope.getKey())); + scopeBuilder.setFamily(ZeroCopyLiteralByteString.wrap(scope.getKey())); WALProtos.ScopeType scopeType = WALProtos.ScopeType.valueOf(scope.getValue().intValue()); scopeBuilder.setScopeType(scopeType); Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (revision 1537632) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (working copy) @@ -197,7 +197,6 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogUtil; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.trace.SpanReceiverHost; import org.apache.hadoop.hbase.util.Bytes; @@ -235,6 +234,7 @@ import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import com.google.protobuf.TextFormat; +import com.google.protobuf.ZeroCopyLiteralByteString; /** * HRegionServer makes a set of 
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java	(working copy)
@@ -43,8 +43,8 @@
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Pair;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.ServiceException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 @InterfaceAudience.Private
 public class ReplicationProtbufUtil {
@@ -89,8 +89,8 @@
       WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder();
       HLogKey key = entry.getKey();
       keyBuilder.setEncodedRegionName(
-        ByteString.copyFrom(key.getEncodedRegionName()));
-      keyBuilder.setTableName(ByteString.copyFrom(key.getTablename().getName()));
+        ZeroCopyLiteralByteString.wrap(key.getEncodedRegionName()));
+      keyBuilder.setTableName(ZeroCopyLiteralByteString.wrap(key.getTablename().getName()));
       keyBuilder.setLogSequenceNumber(key.getLogSeqNum());
       keyBuilder.setWriteTime(key.getWriteTime());
       for(UUID clusterId : key.getClusterIds()) {
@@ -102,7 +102,7 @@
       NavigableMap<byte[], Integer> scopes = key.getScopes();
       if (scopes != null && !scopes.isEmpty()) {
         for (Map.Entry<byte[], Integer> scope: scopes.entrySet()) {
-          scopeBuilder.setFamily(ByteString.copyFrom(scope.getKey()));
+          scopeBuilder.setFamily(ZeroCopyLiteralByteString.wrap(scope.getKey()));
           WALProtos.ScopeType scopeType =
               WALProtos.ScopeType.valueOf(scope.getValue().intValue());
           scopeBuilder.setScopeType(scopeType);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java	(working copy)
@@ -197,7 +197,6 @@
 import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -235,6 +234,7 @@
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 import com.google.protobuf.TextFormat;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * HRegionServer makes a set of HRegions available to clients. It checks in with
@@ -1296,7 +1296,7 @@
     RegionLoad.Builder regionLoad = RegionLoad.newBuilder();
     RegionSpecifier.Builder regionSpecifier = RegionSpecifier.newBuilder();
     regionSpecifier.setType(RegionSpecifierType.REGION_NAME);
-    regionSpecifier.setValue(ByteString.copyFrom(name));
+    regionSpecifier.setValue(ZeroCopyLiteralByteString.wrap(name));
     regionLoad.setRegionSpecifier(regionSpecifier.build())
       .setStores(stores)
       .setStorefiles(storefiles)
@@ -3920,7 +3920,7 @@
     RollWALWriterResponse.Builder builder = RollWALWriterResponse.newBuilder();
     if (regionsToFlush != null) {
       for (byte[] region: regionsToFlush) {
-        builder.addRegionToFlush(ByteString.copyFrom(region));
+        builder.addRegionToFlush(ZeroCopyLiteralByteString.wrap(region));
       }
     }
     return builder.build();
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java	(working copy)
@@ -25,7 +25,6 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -35,9 +34,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;
@@ -47,6 +46,7 @@
 import org.apache.hadoop.io.WritableUtils;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * A Key for an entry in the change log.
@@ -425,8 +425,8 @@
       WALCellCodec.ByteStringCompressor compressor) throws IOException {
     WALKey.Builder builder = WALKey.newBuilder();
     if (compressionContext == null) {
-      builder.setEncodedRegionName(ByteString.copyFrom(this.encodedRegionName));
-      builder.setTableName(ByteString.copyFrom(this.tablename.getName()));
+      builder.setEncodedRegionName(ZeroCopyLiteralByteString.wrap(this.encodedRegionName));
+      builder.setTableName(ZeroCopyLiteralByteString.wrap(this.tablename.getName()));
     } else {
       builder.setEncodedRegionName(
           compressor.compress(this.encodedRegionName, compressionContext.regionDict));
@@ -443,7 +443,7 @@
     }
     if (scopes != null) {
       for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
-        ByteString family = (compressionContext == null) ? ByteString.copyFrom(e.getKey())
+        ByteString family = (compressionContext == null) ? ZeroCopyLiteralByteString.wrap(e.getKey())
             : compressor.compress(e.getKey(), compressionContext.familyDict);
         builder.addScopes(FamilyScope.newBuilder()
             .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java	(working copy)
@@ -33,10 +33,10 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
-
-import com.google.protobuf.ByteString;
 import org.codehaus.jackson.annotate.JsonProperty;
 
+import com.google.protobuf.ZeroCopyLiteralByteString;
+
 /**
  * Representation of a cell. A cell is a single value associated a column and
  * optional qualifier, and either the timestamp when it was stored or the user-
@@ -185,8 +185,8 @@
   @Override
   public byte[] createProtobufOutput() {
     Cell.Builder builder = Cell.newBuilder();
-    builder.setColumn(ByteString.copyFrom(getColumn()));
-    builder.setData(ByteString.copyFrom(getValue()));
+    builder.setColumn(ZeroCopyLiteralByteString.wrap(getColumn()));
+    builder.setData(ZeroCopyLiteralByteString.wrap(getValue()));
     if (hasUserTimestamp()) {
       builder.setTimestamp(getTimestamp());
    }
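
The trade-off behind the wrap() calls above: the resulting ByteString aliases the source array instead of snapshotting it, so a later mutation of that array silently changes the message. An illustrative sketch of the difference (toy values, not HBase code):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.ZeroCopyLiteralByteString;

    public class WrapAliasingCaveat {
      public static void main(String[] args) {
        byte[] row = new byte[] { 'r', '1' };

        ByteString copied = ByteString.copyFrom(row);          // snapshot
        ByteString wrapped = ZeroCopyLiteralByteString.wrap(row); // alias

        row[1] = '2'; // mutate the source array after the fact

        System.out.println(copied.toStringUtf8());  // "r1" - unaffected
        System.out.println(wrapped.toStringUtf8()); // "r2" - sees the mutation
      }
    }

The conversions in this patch target call sites where the array is either freshly built or effectively immutable (region names, table names, start/end keys) for exactly this reason.
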
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java	(working copy)
@@ -26,8 +26,8 @@
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlElement;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
@@ -35,7 +35,7 @@
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
 
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Representation of a grouping of cells. May contain cells from more than
@@ -115,11 +115,11 @@
     CellSet.Builder builder = CellSet.newBuilder();
     for (RowModel row: getRows()) {
       CellSet.Row.Builder rowBuilder = CellSet.Row.newBuilder();
-      rowBuilder.setKey(ByteString.copyFrom(row.getKey()));
+      rowBuilder.setKey(ZeroCopyLiteralByteString.wrap(row.getKey()));
       for (CellModel cell: row.getCells()) {
         Cell.Builder cellBuilder = Cell.newBuilder();
-        cellBuilder.setColumn(ByteString.copyFrom(cell.getColumn()));
-        cellBuilder.setData(ByteString.copyFrom(cell.getValue()));
+        cellBuilder.setColumn(ZeroCopyLiteralByteString.wrap(cell.getColumn()));
+        cellBuilder.setData(ZeroCopyLiteralByteString.wrap(cell.getValue()));
         if (cell.hasUserTimestamp()) {
           cellBuilder.setTimestamp(cell.getTimestamp());
         }
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java	(working copy)
@@ -43,7 +43,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 
 import com.google.protobuf.ByteString;
-
+import com.google.protobuf.ZeroCopyLiteralByteString;
 import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.api.json.JSONJAXBContext;
 import com.sun.jersey.api.json.JSONMarshaller;
@@ -708,13 +708,13 @@
   public byte[] createProtobufOutput() {
     Scanner.Builder builder = Scanner.newBuilder();
     if (!Bytes.equals(startRow, HConstants.EMPTY_START_ROW)) {
-      builder.setStartRow(ByteString.copyFrom(startRow));
+      builder.setStartRow(ZeroCopyLiteralByteString.wrap(startRow));
     }
     if (!Bytes.equals(endRow, HConstants.EMPTY_START_ROW)) {
-      builder.setEndRow(ByteString.copyFrom(endRow));
+      builder.setEndRow(ZeroCopyLiteralByteString.wrap(endRow));
     }
     for (byte[] column: columns) {
-      builder.addColumns(ByteString.copyFrom(column));
+      builder.addColumns(ZeroCopyLiteralByteString.wrap(column));
     }
     if (startTime != 0) {
       builder.setStartTime(startTime);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java	(working copy)
@@ -34,7 +34,7 @@
 import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Representation of the status of a storage cluster:
@@ -722,7 +722,7 @@
     for (Node.Region region: node.regions) {
       StorageClusterStatus.Region.Builder regionBuilder =
         StorageClusterStatus.Region.newBuilder();
-      regionBuilder.setName(ByteString.copyFrom(region.name));
+      regionBuilder.setName(ZeroCopyLiteralByteString.wrap(region.name));
       regionBuilder.setStores(region.stores);
      regionBuilder.setStorefiles(region.storefiles);
      regionBuilder.setStorefileSizeMB(region.storefileSizeMB);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java	(revision 1537632)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java	(working copy)
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
 
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Representation of a list of table regions.
@@ -135,8 +135,8 @@
       TableInfo.Region.Builder regionBuilder = TableInfo.Region.newBuilder();
       regionBuilder.setName(aRegion.getName());
       regionBuilder.setId(aRegion.getId());
-      regionBuilder.setStartKey(ByteString.copyFrom(aRegion.getStartKey()));
-      regionBuilder.setEndKey(ByteString.copyFrom(aRegion.getEndKey()));
+      regionBuilder.setStartKey(ZeroCopyLiteralByteString.wrap(aRegion.getStartKey()));
+      regionBuilder.setEndKey(ZeroCopyLiteralByteString.wrap(aRegion.getEndKey()));
       regionBuilder.setLocation(aRegion.getLocation());
       builder.addRegions(regionBuilder);
     }
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java	(revision 1537632)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java	(working copy)
@@ -18,18 +18,21 @@
  */
 package org.apache.hadoop.hbase.coprocessor;
 
-import com.google.protobuf.ByteString;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.Map;
 import java.util.NavigableMap;
 import java.util.TreeMap;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -37,6 +40,7 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -53,11 +57,9 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * TestEndpoint: test cases to verify coprocessor Endpoint
@@ -123,9 +125,9 @@
             new BlockingRpcCallback<ColumnAggregationProtos.SumResponse>();
         ColumnAggregationProtos.SumRequest.Builder builder =
            ColumnAggregationProtos.SumRequest.newBuilder();
-        builder.setFamily(ByteString.copyFrom(family));
+        builder.setFamily(ZeroCopyLiteralByteString.wrap(family));
         if (qualifier != null && qualifier.length > 0) {
-          builder.setQualifier(ByteString.copyFrom(qualifier));
+          builder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier));
         }
         instance.sum(null,
            builder.build(), rpcCallback);
        return rpcCallback.get().getSum();
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java	(revision 1537632)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java	(working copy)
@@ -32,46 +32,45 @@
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.hadoop.hbase.MediumTests;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.coprocessor.RowProcessorClient;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse;
+import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse;
-import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
 import org.apache.hadoop.hbase.regionserver.BaseRowProcessor;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.Message;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 import com.sun.org.apache.commons.logging.Log;
 import
 com.sun.org.apache.commons.logging.LogFactory;
@@ -363,7 +362,7 @@
     public IncCounterProcessorRequest getRequestData() throws IOException {
       IncCounterProcessorRequest.Builder builder = IncCounterProcessorRequest.newBuilder();
       builder.setCounter(counter);
-      builder.setRow(ByteString.copyFrom(row));
+      builder.setRow(ZeroCopyLiteralByteString.wrap(row));
       return builder.build();
     }
@@ -442,8 +441,8 @@
     public FriendsOfFriendsProcessorRequest getRequestData() throws IOException {
       FriendsOfFriendsProcessorRequest.Builder builder =
           FriendsOfFriendsProcessorRequest.newBuilder();
-      builder.setPerson(ByteString.copyFrom(person));
-      builder.setRow(ByteString.copyFrom(row));
+      builder.setPerson(ZeroCopyLiteralByteString.wrap(person));
+      builder.setRow(ZeroCopyLiteralByteString.wrap(row));
       builder.addAllResult(result);
       FriendsOfFriendsProcessorRequest f = builder.build();
       return f;
@@ -547,8 +546,8 @@
     @Override
     public RowSwapProcessorRequest getRequestData() throws IOException {
       RowSwapProcessorRequest.Builder builder = RowSwapProcessorRequest.newBuilder();
-      builder.setRow1(ByteString.copyFrom(row1));
-      builder.setRow2(ByteString.copyFrom(row2));
+      builder.setRow1(ZeroCopyLiteralByteString.wrap(row1));
+      builder.setRow2(ZeroCopyLiteralByteString.wrap(row2));
       return builder.build();
     }
@@ -607,7 +606,7 @@
     @Override
     public TimeoutProcessorRequest getRequestData() throws IOException {
       TimeoutProcessorRequest.Builder builder = TimeoutProcessorRequest.newBuilder();
-      builder.setRow(ByteString.copyFrom(row));
+      builder.setRow(ZeroCopyLiteralByteString.wrap(row));
       return builder.build();
     }
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java	(revision 1537632)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java	(working copy)
@@ -42,6 +42,7 @@
 import org.junit.experimental.categories.Category;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Class to test ProtobufUtil.
@@ -53,12 +54,12 @@
     NameBytesPair.Builder builder = NameBytesPair.newBuilder();
     final String omg = "OMG!!!";
     builder.setName("java.io.IOException");
-    builder.setValue(ByteString.copyFrom(Bytes.toBytes(omg)));
+    builder.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(omg)));
     Throwable t = ProtobufUtil.toException(builder.build());
     assertEquals(omg, t.getMessage());
     builder.clear();
     builder.setName("org.apache.hadoop.ipc.RemoteException");
-    builder.setValue(ByteString.copyFrom(Bytes.toBytes(omg)));
+    builder.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(omg)));
     t = ProtobufUtil.toException(builder.build());
     assertEquals(omg, t.getMessage());
   }
@@ -202,10 +203,10 @@
     valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
     QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
     qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
-    qualifierBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(11L)));
+    qualifierBuilder.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(11L)));
     valueBuilder.addQualifierValue(qualifierBuilder.build());
     qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
-    qualifierBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(22L)));
+    qualifierBuilder.setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(22L)));
     valueBuilder.addQualifierValue(qualifierBuilder.build());
     mutateBuilder.addColumnValue(valueBuilder.build());
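
The hunks above deliberately leave ByteString.copyFromUtf8 alone: it encodes a String into a byte[] it allocates itself, so there is no caller-owned array to alias and nothing to wrap. Only copyFrom(byte[]) call sites are converted. A two-line illustration (values taken from the test above):

    // Encoding a String necessarily produces a fresh array; copyFromUtf8 stays.
    ByteString family = ByteString.copyFromUtf8("f1");
    // Bytes.toBytes(11L) returns a fresh array nothing else references, so
    // wrapping it without a defensive copy is safe.
    ByteString value = ZeroCopyLiteralByteString.wrap(Bytes.toBytes(11L));
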
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java	(revision 1537632)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java	(working copy)
@@ -42,6 +42,7 @@
 import org.mockito.Mockito;
 
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 /**
  * Tests that verify certain RPCs get a higher QoS.
  */
@@ -69,12 +70,12 @@
     GetRequest.Builder getRequestBuilder = GetRequest.newBuilder();
     RegionSpecifier.Builder regionSpecifierBuilder = RegionSpecifier.newBuilder();
     regionSpecifierBuilder.setType(RegionSpecifierType.REGION_NAME);
-    ByteString name = ByteString.copyFrom(HRegionInfo.FIRST_META_REGIONINFO.getRegionName());
+    ByteString name = ZeroCopyLiteralByteString.wrap(HRegionInfo.FIRST_META_REGIONINFO.getRegionName());
     regionSpecifierBuilder.setValue(name);
     RegionSpecifier regionSpecifier = regionSpecifierBuilder.build();
     getRequestBuilder.setRegion(regionSpecifier);
     Get.Builder getBuilder = Get.newBuilder();
-    getBuilder.setRow(ByteString.copyFrom("somerow".getBytes()));
+    getBuilder.setRow(ZeroCopyLiteralByteString.wrap("somerow".getBytes()));
     getRequestBuilder.setGet(getBuilder.build());
     GetRequest getRequest = getRequestBuilder.build();
     RequestHeader header = headerBuilder.build();
@@ -143,4 +144,4 @@
     Mockito.when(mockRegionInfo.isMetaRegion()).thenReturn(false);
     assertEquals(HConstants.NORMAL_QOS, priority.getPriority(header, scanRequest));
   }
-}
\ No newline at end of file
+}
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java	(revision 1537632)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java	(working copy)
@@ -28,7 +28,13 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
@@ -44,7 +50,7 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 
 @Category(MediumTests.class)
 public class TestReplicationSink {
@@ -256,9 +262,9 @@
     uuidBuilder.setLeastSigBits(HConstants.DEFAULT_CLUSTER_ID.getLeastSignificantBits());
     uuidBuilder.setMostSigBits(HConstants.DEFAULT_CLUSTER_ID.getMostSignificantBits());
     keyBuilder.setClusterId(uuidBuilder.build());
-    keyBuilder.setTableName(ByteString.copyFrom(table));
+    keyBuilder.setTableName(ZeroCopyLiteralByteString.wrap(table));
     keyBuilder.setWriteTime(now);
-    keyBuilder.setEncodedRegionName(ByteString.copyFrom(HConstants.EMPTY_BYTE_ARRAY));
+    keyBuilder.setEncodedRegionName(ZeroCopyLiteralByteString.wrap(HConstants.EMPTY_BYTE_ARRAY));
     keyBuilder.setLogSequenceNumber(-1);
     builder.setKey(keyBuilder.build());
     cells.add(kv);
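
Background on the helper itself: ZeroCopyLiteralByteString is an HBase class deliberately declared in the com.google.protobuf package, because the non-copying ByteString subclass it needs (LiteralByteString) is package-private there. A rough structural sketch, under the assumption that it simply exposes the package-private constructor; this is illustrative, not the verbatim HBase source:

    // Declared inside protobuf's package to reach the package-private
    // LiteralByteString, which stores the array reference it is given.
    package com.google.protobuf;

    public final class ZeroCopyLiteralByteString extends LiteralByteString {
      private ZeroCopyLiteralByteString(final byte[] array) {
        super(array); // keeps a reference to the caller's array; no copy
      }

      /** Wraps {@code array} without copying; callers must not mutate it afterwards. */
      public static ByteString wrap(final byte[] array) {
        return new ZeroCopyLiteralByteString(array);
      }
    }
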