diff --git a/src/main/java/org/apache/hadoop/hbase/client/Result.java b/src/main/java/org/apache/hadoop/hbase/client/Result.java
index 6bdc892..c958ae5 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.WritableWithSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -34,6 +35,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -580,6 +582,9 @@ public class Result implements Writable, WritableWithSize {
 
   public static void writeArray(final DataOutput out, Result [] results)
   throws IOException {
+
+
+
     // Write version when writing array form.
     // This assumes that results are sent to the client as Result[], so we
     // have an opportunity to handle version differences without affecting
@@ -589,27 +594,60 @@ public class Result implements Writable, WritableWithSize {
       out.writeInt(0);
       return;
     }
-    out.writeInt(results.length);
-    int bufLen = 0;
-    for(Result result : results) {
-      bufLen += Bytes.SIZEOF_INT;
-      if(result == null || result.isEmpty()) {
-        continue;
-      }
-      for(KeyValue key : result.raw()) {
-        bufLen += key.getLength() + Bytes.SIZEOF_INT;
-      }
-    }
-    out.writeInt(bufLen);
-    for(Result result : results) {
-      if(result == null || result.isEmpty()) {
-        out.writeInt(0);
-        continue;
-      }
-      out.writeInt(result.size());
-      for(KeyValue kv : result.raw()) {
-        out.writeInt(kv.getLength());
-        out.write(kv.getBuffer(), kv.getOffset(), kv.getLength());
-      }
-    }
+    out.writeInt(results.length); // result count
+
+    List<byte[]> serDict = new ArrayList<byte[]>();
+    // hashmap might work better, but byte[].hashCode is just Object.hashCode
+    Map<byte[], Integer> dict = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
+    int dictEntries = 0;
+    for (Result r : results) {
+      if (r == null || r.isEmpty()) continue;
+
+      // collect all the family and qualifier names.
+      for (KeyValue kv : r.raw()) {
+        byte[] family = kv.getFamily();
+        byte[] qualifier = kv.getQualifier();
+        if (!dict.containsKey(family)) {
+          dictEntries++;
+          dict.put(family, dictEntries);
+
+          serDict.add(family);
+        }
+        if (!dict.containsKey(qualifier)) {
+          dictEntries++;
+          dict.put(qualifier, dictEntries);
+
+          serDict.add(qualifier);
+        }
+      }
+    }
+
+    // write out dictionary:
+    out.writeInt(dictEntries);
+    for (byte[] dictEntry: serDict) {
+      Bytes.writeByteArray(out, dictEntry);
+    }
+    serDict = null; // done, make go away for great justice.
+
+    // now write things out:
+    for (Result r : results) {
+      if (r == null || r.isEmpty()) {
+        Bytes.writeByteArray(out, new byte[0]);
+        continue;
+      }
+      // write out the rowid.
+      KeyValue first = r.raw()[0];
+      Bytes.writeByteArray(out, first.getBuffer(), first.getRowOffset(), first.getRowLength());
+      WritableUtils.writeVInt(out, r.size());
+
+      for (KeyValue kv : r.raw()) {
+        int family = dict.get(kv.getFamily());
+        int qual = dict.get(kv.getQualifier());
+        WritableUtils.writeVInt(out, family);
+        WritableUtils.writeVInt(out, qual);
+        out.writeLong(kv.getTimestamp());
+        // write value
+        Bytes.writeByteArray(out, kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
+      }
+    }
   }
@@ -629,27 +667,33 @@ public class Result implements Writable, WritableWithSize {
       return new Result[0];
     }
     Result [] results = new Result[numResults];
-    int bufSize = in.readInt();
-    byte [] buf = new byte[bufSize];
-    int offset = 0;
-    for(int i=0;i<numResults;i++) {
-      int numKeys = in.readInt();
-      offset += Bytes.SIZEOF_INT;
-      if(numKeys == 0) {
-        results[i] = new Result((ImmutableBytesWritable)null);
-        continue;
-      }
-      int initialOffset = offset;
-      for(int j=0;j<numKeys;j++) {
-        int keyLen = in.readInt();
-        Bytes.putInt(buf, offset, keyLen);
-        offset += Bytes.SIZEOF_INT;
-        in.readFully(buf, offset, keyLen);
-        offset += keyLen;
-      }
-      int totalLength = offset - initialOffset;
-      results[i] = new Result(new ImmutableBytesWritable(buf, initialOffset,
-          totalLength));
-    }
-    return results;
+
+    // read the dictionary.
+    int numDictEntries = in.readInt();
+    Map<Integer, byte[]> dict = new HashMap<Integer, byte[]>();
+    for (int i = 1; i <= numDictEntries; i++) {
+      byte[] entry = Bytes.readByteArray(in);
+      dict.put(i, entry);
+    }
+
+    // now have dictionary, decode each result.
+    for (int i = 0; i < numResults; i++) {
+      byte [] rowKey = Bytes.readByteArray(in);
+      if (rowKey.length == 0) {
+        // empty row key marks a null/empty Result (see writeArray).
+        results[i] = new Result((ImmutableBytesWritable) null);
+        continue;
+      }
+      int numKvs = WritableUtils.readVInt(in);
+      KeyValue [] kvs = new KeyValue[numKvs];
+      for (int j = 0; j < numKvs; j++) {
+        int family = WritableUtils.readVInt(in);
+        int qual = WritableUtils.readVInt(in);
+        long ts = in.readLong();
+        byte [] value = Bytes.readByteArray(in);
+        kvs[j] = new KeyValue(rowKey, dict.get(family), dict.get(qual), ts, value);
+      }
+      results[i] = new Result(kvs);
+    }
+    return results;
   }

NOTE(review): the src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
section of this patch was corrupted in transit (an HTML-tag stripper ate
everything between a '<' in the old readArray hunk and the '>' in
"responseSize > warnResponseSize", including this section's diff header).
Only this fragment survived and it could not be safely reconstructed:

    if (responseSize > warnResponseSize) {
      LOG.warn(getName()+", responseTooLarge for: "+call+": Size: "

Regenerate that section from the original commit. `patch -p1` will skip this
loose note; `git apply` will not — delete it first. Also note: the readArray
hunk above is a reconstruction mirroring the write side (vint kv count, vint
family/qualifier dictionary ids, long timestamp, value byte array) — verify
against the original commit. The `r == null` guards in writeArray are a fix:
the submitted patch dropped the null-element handling the old code had.

diff --git a/src/test/java/org/apache/hadoop/hbase/TestSerialization.java b/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
index befcdaf..ca8de34 100644
--- a/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
+++ b/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
@@ -434,8 +434,9 @@ public class TestSerialization {
     byte [] valueB = Bytes.toBytes("valueB");
 
     KeyValue kvA = new KeyValue(rowA, famA, qfA, valueA);
     KeyValue kvB = new KeyValue(rowB, famB, qfB, valueB);
+    KeyValue kbA1 = new KeyValue(rowA, famB, qfA, valueB);
 
-    Result result1 = new Result(new KeyValue[]{kvA, kvB});
+    Result result1 = new Result(new KeyValue[]{kvA, kbA1});
     Result result2 = new Result(new KeyValue[]{kvB});
     Result result3 = new Result(new KeyValue[]{kvB});