Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (revision 1426728) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (working copy) @@ -18533,10 +18533,10 @@ public interface ActionResultOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional .NameBytesPair value = 1; + // optional .Result value = 1; boolean hasValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue(); - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder(); // optional .NameBytesPair exception = 2; boolean hasException(); @@ -18572,16 +18572,16 @@ } private int bitField0_; - // optional .NameBytesPair value = 1; + // optional .Result value = 1; public static final int VALUE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_; public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { return value_; } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { return value_; } @@ -18599,7 +18599,7 @@ } private void initFields() { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @@ -18818,7 +18818,7 @@ public Builder clear() { super.clear(); if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); } else { valueBuilder_.clear(); } @@ -18949,7 +18949,7 @@ break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(); if (hasValue()) { subBuilder.mergeFrom(getValue()); } @@ -18972,21 +18972,21 @@ private int bitField0_; - // optional .NameBytesPair value = 1; - private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + // optional .Result value = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_; + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> valueBuilder_; public boolean hasValue() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() { if (valueBuilder_ == null) { return value_; } else { return valueBuilder_.getMessage(); } } - public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -19000,7 +19000,7 @@ return this; } public Builder setValue( - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { if (valueBuilder_ == null) { value_ = builderForValue.build(); onChanged(); @@ -19010,12 +19010,12 @@ bitField0_ |= 0x00000001; return this; } - public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { if (valueBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + value_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { value_ = - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(value_).mergeFrom(value).buildPartial(); } else { value_ = value; } @@ -19028,7 +19028,7 @@ } public Builder clearValue() { if (valueBuilder_ == null) { - value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); onChanged(); } else { valueBuilder_.clear(); @@ -19036,12 +19036,12 @@ bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getValueBuilder() { bitField0_ |= 0x00000001; onChanged(); return getValueFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() { if (valueBuilder_ != null) { return valueBuilder_.getMessageOrBuilder(); } else { @@ -19049,11 +19049,11 @@ } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getValueFieldBuilder() { if (valueBuilder_ == null) { valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( value_, getParentForChildren(), isClean()); @@ -21479,25 +21479,25 @@ "oprocessorServiceResponse\022 \n\006region\030\001 \002(" + "\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016.Nam" + "eBytesPair\"9\n\013MultiAction\022\027\n\006mutate\030\001 \001(" + - "\0132\007.Mutate\022\021\n\003get\030\002 \001(\0132\004.Get\"P\n\014ActionR" + - "esult\022\035\n\005value\030\001 \001(\0132\016.NameBytesPair\022!\n\t", - "exception\030\002 \001(\0132\016.NameBytesPair\"^\n\014Multi" + - "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + - "r\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022\016\n\006atomi" + - "c\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006result\030\001 \003(\013" + - "2\r.ActionResult2\223\003\n\rClientService\022 \n\003get" + - "\022\013.GetRequest\032\014.GetResponse\022)\n\006mutate\022\016." + - "MutateRequest\032\017.MutateResponse\022#\n\004scan\022\014" + - ".ScanRequest\032\r.ScanResponse\022,\n\007lockRow\022\017" + - ".LockRowRequest\032\020.LockRowResponse\0222\n\tunl" + - "ockRow\022\021.UnlockRowRequest\032\022.UnlockRowRes", - "ponse\022>\n\rbulkLoadHFile\022\025.BulkLoadHFileRe" + - "quest\032\026.BulkLoadHFileResponse\022F\n\013execSer" + - "vice\022\032.CoprocessorServiceRequest\032\033.Copro" + - "cessorServiceResponse\022&\n\005multi\022\r.MultiRe" + - "quest\032\016.MultiResponseBB\n*org.apache.hado" + - "op.hbase.protobuf.generatedB\014ClientProto" + - "sH\001\210\001\001\240\001\001" + "\0132\007.Mutate\022\021\n\003get\030\002 \001(\0132\004.Get\"I\n\014ActionR" + + "esult\022\026\n\005value\030\001 \001(\0132\007.Result\022!\n\texcepti", + "on\030\002 \001(\0132\016.NameBytesPair\"^\n\014MultiRequest" + + "\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\006ac" + + "tion\030\002 \003(\0132\014.MultiAction\022\016\n\006atomic\030\003 \001(\010" + + "\".\n\rMultiResponse\022\035\n\006result\030\001 \003(\0132\r.Acti" + + "onResult2\223\003\n\rClientService\022 \n\003get\022\013.GetR" + + "equest\032\014.GetResponse\022)\n\006mutate\022\016.MutateR" + + "equest\032\017.MutateResponse\022#\n\004scan\022\014.ScanRe" + + "quest\032\r.ScanResponse\022,\n\007lockRow\022\017.LockRo" + + "wRequest\032\020.LockRowResponse\0222\n\tunlockRow\022" + + "\021.UnlockRowRequest\032\022.UnlockRowResponse\022>", + "\n\rbulkLoadHFile\022\025.BulkLoadHFileRequest\032\026" + + ".BulkLoadHFileResponse\022F\n\013execService\022\032." 
+ + "CoprocessorServiceRequest\032\033.CoprocessorS" + + "erviceResponse\022&\n\005multi\022\r.MultiRequest\032\016" + + ".MultiResponseBB\n*org.apache.hadoop.hbas" + + "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" + + "\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { Index: hbase-protocol/src/main/protobuf/Client.proto =================================================================== --- hbase-protocol/src/main/protobuf/Client.proto (revision 1426728) +++ hbase-protocol/src/main/protobuf/Client.proto (working copy) @@ -291,7 +291,7 @@ * is returned as a stringified parameter. */ message ActionResult { - optional NameBytesPair value = 1; + optional Result value = 1; optional NameBytesPair exception = 2; } Index: hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (revision 1426728) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (working copy) @@ -1,565 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hbase.io; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInput; -import java.io.DataInputStream; -import java.io.DataOutput; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.Serializable; -import java.lang.reflect.Array; -import java.util.ArrayList; -import java.util.List; -import java.util.NavigableSet; - -import junit.framework.TestCase; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.ClusterStatus; -import org.apache.hadoop.hbase.DeserializationException; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.client.Action; -import org.apache.hadoop.hbase.client.Append; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Increment; -import org.apache.hadoop.hbase.client.MultiAction; -import org.apache.hadoop.hbase.client.MultiResponse; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.RowMutations; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.BitComparator; -import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; -import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; -import org.apache.hadoop.hbase.filter.ColumnRangeFilter; -import org.apache.hadoop.hbase.filter.CompareFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.filter.DependentColumnFilter; -import org.apache.hadoop.hbase.filter.Filter; -import org.apache.hadoop.hbase.filter.FilterBase; -import org.apache.hadoop.hbase.filter.FilterList; -import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; -import org.apache.hadoop.hbase.filter.InclusiveStopFilter; -import org.apache.hadoop.hbase.filter.KeyOnlyFilter; -import org.apache.hadoop.hbase.filter.PageFilter; -import org.apache.hadoop.hbase.filter.PrefixFilter; -import org.apache.hadoop.hbase.filter.QualifierFilter; -import org.apache.hadoop.hbase.filter.RandomRowFilter; -import org.apache.hadoop.hbase.filter.RowFilter; -import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; -import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; -import org.apache.hadoop.hbase.filter.SkipFilter; -import org.apache.hadoop.hbase.filter.ValueFilter; -import org.apache.hadoop.hbase.filter.WhileMatchFilter; -import org.apache.hadoop.hbase.filter.ByteArrayComparable; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; -import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.RegionOpeningState; -import org.apache.hadoop.hbase.regionserver.wal.HLog; -import org.apache.hadoop.hbase.regionserver.wal.HLogKey; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.MapWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; -import 
org.apache.hadoop.io.WritableComparator; -import org.junit.Assert; -import org.junit.experimental.categories.Category; - -import com.google.common.collect.Lists; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; - -@Category(SmallTests.class) -public class TestHbaseObjectWritable extends TestCase { - - @Override - protected void setUp() throws Exception { - super.setUp(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - } - - @SuppressWarnings("boxing") - public void testReadOldObjectDataInput() throws IOException { - Configuration conf = HBaseConfiguration.create(); - /* - * This is the code used to generate byte[] where - * HbaseObjectWritable used byte for code - * - ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); - DataOutputStream out = new DataOutputStream(byteStream); - HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf); - byte[] ba = byteStream.toByteArray(); - out.close(); - */ - - /* - * byte array generated by the folowing call - * HbaseObjectWritable.writeObject(out, new Text("Old"), Text.class, conf); - */ - byte[] baForText = {13, 13, 3, 79, 108, 100}; - Text txt = (Text)readByteArray(conf, baForText); - Text oldTxt = new Text("Old"); - assertEquals(txt, oldTxt); - - final byte A = 'A'; - byte [] bytes = new byte[1]; - bytes[0] = A; - /* - * byte array generated by the folowing call - * HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf); - */ - byte[] baForByteArray = { 11, 1, 65 }; - byte[] baOut = (byte[])readByteArray(conf, baForByteArray); - assertTrue(Bytes.equals(baOut, bytes)); - } - - /* - * helper method which reads byte array using HbaseObjectWritable.readObject() - */ - private Object readByteArray(final Configuration conf, final byte[] ba) - throws IOException { - ByteArrayInputStream bais = - new ByteArrayInputStream(ba); - DataInputStream dis = new DataInputStream(bais); - Object product = HbaseObjectWritable.readObject(dis, conf); - dis.close(); - return product; - } - - @SuppressWarnings("boxing") - public void testReadObjectDataInputConfiguration() throws IOException { - Configuration conf = HBaseConfiguration.create(); - // Do primitive type - final int COUNT = 101; - assertTrue(doType(conf, COUNT, int.class).equals(COUNT)); - // Do array - final byte [] testing = "testing".getBytes(); - byte [] result = (byte [])doType(conf, testing, testing.getClass()); - assertTrue(WritableComparator.compareBytes(testing, 0, testing.length, - result, 0, result.length) == 0); - // Do unsupported type. - boolean exception = false; - try { - doType(conf, new Object(), Object.class); - } catch (UnsupportedOperationException uoe) { - exception = true; - } - assertTrue(exception); - // Try odd types - final byte A = 'A'; - byte [] bytes = new byte[1]; - bytes[0] = A; - Object obj = doType(conf, bytes, byte [].class); - assertTrue(((byte [])obj)[0] == A); - // Do 'known' Writable type. 
- obj = doType(conf, new Text(""), Text.class); - assertTrue(obj instanceof Text); - //List.class - List list = new ArrayList(); - list.add("hello"); - list.add("world"); - list.add("universe"); - obj = doType(conf, list, List.class); - assertTrue(obj instanceof List); - Assert.assertArrayEquals(list.toArray(), ((List)obj).toArray() ); - //List.class with null values - List listWithNulls = new ArrayList(); - listWithNulls.add("hello"); - listWithNulls.add("world"); - listWithNulls.add(null); - obj = doType(conf, listWithNulls, List.class); - assertTrue(obj instanceof List); - Assert.assertArrayEquals(listWithNulls.toArray(), ((List)obj).toArray() ); - //ArrayList.class - ArrayList arr = new ArrayList(); - arr.add("hello"); - arr.add("world"); - arr.add("universe"); - obj = doType(conf, arr, ArrayList.class); - assertTrue(obj instanceof ArrayList); - Assert.assertArrayEquals(list.toArray(), ((ArrayList)obj).toArray() ); - // Check that filters can be serialized - obj = - ProtobufUtil.toFilter(ProtobufUtil.toFilter(new PrefixFilter(HConstants.EMPTY_BYTE_ARRAY))); - assertTrue(obj instanceof PrefixFilter); - } - - public void testCustomWritable() throws Exception { - Configuration conf = HBaseConfiguration.create(); - - // test proper serialization of un-encoded custom writables - CustomWritable custom = new CustomWritable("test phrase"); - Object obj = doType(conf, custom, CustomWritable.class); - assertTrue(obj instanceof Writable); - assertTrue(obj instanceof CustomWritable); - assertEquals("test phrase", ((CustomWritable)obj).getValue()); - } - - public void testCustomFilter() throws Exception { - // test proper serialization of a custom filter - CustomFilter filt = new CustomFilter("mykey"); - FilterList filtlist = new FilterList(FilterList.Operator.MUST_PASS_ALL); - filtlist.addFilter(filt); - Filter obj = ProtobufUtil.toFilter(ProtobufUtil.toFilter(filtlist)); - assertTrue(obj instanceof FilterList); - assertNotNull(((FilterList)obj).getFilters()); - assertEquals(1, ((FilterList)obj).getFilters().size()); - Filter child = ((FilterList)obj).getFilters().get(0); - assertTrue(child instanceof CustomFilter); - assertEquals("mykey", ((CustomFilter)child).getKey()); - } - - public void testCustomSerializable() throws Exception { - Configuration conf = HBaseConfiguration.create(); - - // test proper serialization of un-encoded serialized java objects - CustomSerializable custom = new CustomSerializable("test phrase"); - Object obj = doType(conf, custom, CustomSerializable.class); - assertTrue(obj instanceof Serializable); - assertTrue(obj instanceof CustomSerializable); - assertEquals("test phrase", ((CustomSerializable)obj).getValue()); - } - - private Object doType(final Configuration conf, final Object value, - final Class clazz) - throws IOException { - ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); - DataOutputStream out = new DataOutputStream(byteStream); - HbaseObjectWritable.writeObject(out, value, clazz, conf); - out.close(); - ByteArrayInputStream bais = - new ByteArrayInputStream(byteStream.toByteArray()); - DataInputStream dis = new DataInputStream(bais); - Object product = HbaseObjectWritable.readObject(dis, conf); - dis.close(); - return product; - } - - public static class A extends IntWritable { - public A() {} - public A(int a) {super(a);} - } - - public static class B extends A { - int b; - public B() { } - public B(int a, int b) { - super(a); - this.b = b; - } - @Override - public void write(DataOutput out) throws IOException { - super.write(out); - 
out.writeInt(b); - } - - @Override - public void readFields(DataInput in) throws IOException { - super.readFields(in); - this.b = in.readInt(); - } - @Override - public boolean equals(Object o) { - if (o instanceof B) { - return this.get() == ((B) o).get() && this.b == ((B) o).b; - } - return false; - } - } - - /** Tests for serialization of List and Arrays */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testPolymorphismInSequences() throws Exception { - Configuration conf = HBaseConfiguration.create(); - Object ret; - - //test with lists - List list = Lists.newArrayList(new A(42), new B(10, 100)); - ret = doType(conf, list, list.getClass()); - assertEquals(ret, list); - - //test with Writable[] - Writable[] warr = new Writable[] {new A(42), new B(10, 100)}; - ret = doType(conf, warr, warr.getClass()); - Assert.assertArrayEquals((Writable[])ret, warr); - - //test with arrays - A[] arr = new A[] {new A(42), new B(10, 100)}; - ret = doType(conf, arr, arr.getClass()); - Assert.assertArrayEquals((A[])ret, arr); - - //test with double array - A[][] darr = new A[][] {new A[] { new A(42), new B(10, 100)}, new A[] {new A(12)}}; - ret = doType(conf, darr, darr.getClass()); - Assert.assertArrayEquals((A[][])ret, darr); - - //test with List of arrays - List larr = Lists.newArrayList(arr, new A[] {new A(99)}); - ret = doType(conf, larr, larr.getClass()); - List lret = (List) ret; - assertEquals(larr.size(), lret.size()); - for (int i=0; i permissions = createPermissions(); + byte [] bytes = writePermissionsAsBytes(permissions, conf); + AccessControlLists.readPermissions(bytes, conf); + } + + /** + * Writes a set of permissions as {@link org.apache.hadoop.io.Writable} instances + * and returns the resulting byte array. Used to verify we can read stuff written + * with Writable. + */ + public static byte[] writePermissionsAsBytes(ListMultimap perms, + Configuration conf) { + try { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + writePermissions(new DataOutputStream(bos), perms, conf); + return bos.toByteArray(); + } catch (IOException ioe) { + // shouldn't happen here + throw new RuntimeException("Error serializing permissions", ioe); + } + } + + /** + * Writes a set of permissions as {@link org.apache.hadoop.io.Writable} instances + * to the given output stream. 
+ * @param out + * @param perms + * @param conf + * @throws IOException + */ + public static void writePermissions(DataOutput out, + ListMultimap perms, Configuration conf) + throws IOException { + Set keys = perms.keySet(); + out.writeInt(keys.size()); + for (String key : keys) { + Text.writeString(out, key); + HbaseObjectWritableFor96Migration.writeObject(out, perms.get(key), List.class, conf); + } + } + + + @Test public void testBasicWrite() throws Exception { Configuration conf = UTIL.getConfiguration(); // add some permissions @@ -243,6 +296,16 @@ @Test public void testSerialization() throws Exception { Configuration conf = UTIL.getConfiguration(); + ListMultimap permissions = createPermissions(); + byte[] permsData = AccessControlLists.writePermissionsAsBytes(permissions, conf); + + ListMultimap copy = + AccessControlLists.readPermissions(permsData, conf); + + checkMultimapEqual(permissions, copy); + } + + private ListMultimap createPermissions() { ListMultimap permissions = ArrayListMultimap.create(); permissions.put("george", new TablePermission(TEST_TABLE, null, TablePermission.Action.READ)); @@ -252,13 +315,7 @@ TablePermission.Action.READ)); permissions.put("hubert", new TablePermission(TEST_TABLE2, null, TablePermission.Action.READ, TablePermission.Action.WRITE)); - - byte[] permsData = AccessControlLists.writePermissionsAsBytes(permissions, conf); - - ListMultimap copy = - AccessControlLists.readPermissions(permsData, conf); - - checkMultimapEqual(permissions, copy); + return permissions; } public void checkMultimapEqual(ListMultimap first, Index: hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java (revision 1426728) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java (working copy) @@ -521,17 +521,6 @@ } */ - @Test public void testTimeRange() throws Exception{ - TimeRange tr = new TimeRange(0,5); - byte [] mb = Writables.getBytes(tr); - TimeRange deserializedTr = - (TimeRange)Writables.getWritable(mb, new TimeRange()); - - assertEquals(tr.getMax(), deserializedTr.getMax()); - assertEquals(tr.getMin(), deserializedTr.getMin()); - - } - protected static final int MAXVERSIONS = 3; protected final static byte [] fam1 = Bytes.toBytes("colfamily1"); protected final static byte [] fam2 = Bytes.toBytes("colfamily2"); Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java (working copy) @@ -132,8 +132,7 @@ LOG.debug("Updating permissions cache from node "+table+" with data: "+ Bytes.toStringBinary(nodeData)); } - authManager.refreshCacheFromWritable(Bytes.toBytes(table), - nodeData); + authManager.refreshCacheFromWritable(Bytes.toBytes(table), nodeData); } catch (IOException ioe) { LOG.error("Failed parsing permissions for table '" + table + "' from zk", ioe); Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java (revision 1426728) +++ 
hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java (working copy) @@ -18,9 +18,17 @@ package org.apache.hadoop.hbase.security.access; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; +import java.io.ByteArrayInputStream; +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -37,26 +45,23 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.InternalScanner; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.filter.RegexStringComparator; -import org.apache.hadoop.hbase.filter.QualifierFilter; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.Text; -import java.io.ByteArrayInputStream; -import java.io.DataInput; -import java.io.DataInputStream; -import java.io.IOException; -import java.util.*; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ListMultimap; +import com.google.protobuf.InvalidProtocolBufferException; /** * Maintains lists of permission grants to users and groups to allow for @@ -510,7 +515,8 @@ * * Writes a set of permission [user: table permission] */ - public static byte[] writePermissionsAsBytes(ListMultimap perms, Configuration conf) { + public static byte[] writePermissionsAsBytes(ListMultimap perms, + Configuration conf) { return ProtobufUtil.prependPBMagic(ProtobufUtil.toUserTablePermissions(perms).toByteArray()); } @@ -519,7 +525,8 @@ * from the input stream. 
*/ public static ListMultimap readPermissions(byte[] data, - Configuration conf) throws DeserializationException { + Configuration conf) + throws DeserializationException { if (ProtobufUtil.isPBMagicPrefix(data)) { int pblen = ProtobufUtil.lengthOfPBMagic(); try { @@ -537,7 +544,8 @@ int length = in.readInt(); for (int i=0; i userPerms = (List)HbaseObjectWritable.readObject(in, conf); + List userPerms = + (List)HbaseObjectWritableFor96Migration.readObject(in, conf); perms.putAll(user, userPerms); } } catch (IOException e) { Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java (revision 1426729) @@ -0,0 +1,812 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.security.access; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.reflect.Array; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NavigableSet; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.hbase.ClusterStatus; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.client.Action; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.MultiAction; +import org.apache.hadoop.hbase.client.MultiResponse; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.BitComparator; +import org.apache.hadoop.hbase.filter.ByteArrayComparable; +import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; +import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.ColumnRangeFilter; +import org.apache.hadoop.hbase.filter.CompareFilter; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.DependentColumnFilter; +import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +import org.apache.hadoop.hbase.filter.InclusiveStopFilter; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.RandomRowFilter; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; +import org.apache.hadoop.hbase.filter.SkipFilter; +import org.apache.hadoop.hbase.filter.ValueFilter; +import org.apache.hadoop.hbase.filter.WhileMatchFilter; +import org.apache.hadoop.hbase.io.DataOutputOutputStream; +import org.apache.hadoop.hbase.io.WritableWithSize; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.regionserver.RegionOpeningState; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ProtoUtil; +import org.apache.hadoop.io.MapWritable; 
+import org.apache.hadoop.io.ObjectWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.WritableFactories; +import org.apache.hadoop.io.WritableUtils; + +import com.google.protobuf.Message; +import com.google.protobuf.RpcController; + +/** + *
This is a customized version of the polymorphic hadoop + * {@link ObjectWritable}. It removes UTF8 (HADOOP-414). + * Using {@link Text} intead of UTF-8 saves ~2% CPU between reading and writing + * objects running a short sequentialWrite Performance Evaluation test just in + * ObjectWritable alone; more when we're doing randomRead-ing. Other + * optimizations include our passing codes for classes instead of the + * actual class names themselves. This makes it so this class needs amendment + * if non-Writable classes are introduced -- if passed a Writable for which we + * have no code, we just do the old-school passing of the class name, etc. -- + * but passing codes the savings are large particularly when cell + * data is small (If < a couple of kilobytes, the encoding/decoding of class + * name and reflection to instantiate class was costing in excess of the cell + * handling). + * @deprecated This class is needed migrating TablePermissions written with + * Writables. It is needed to read old permissions written pre-0.96. This + * class is to be removed after HBase 0.96 ships since then all permissions + * will have been migrated and written with protobufs. + */ +@Deprecated +@InterfaceAudience.Private +class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, Configurable { + protected final static Log LOG = LogFactory.getLog(HbaseObjectWritableFor96Migration.class); + + // Here we maintain two static maps of classes to code and vice versa. + // Add new classes+codes as wanted or figure way to auto-generate these + // maps. + static final Map> CODE_TO_CLASS = + new HashMap>(); + static final Map, Integer> CLASS_TO_CODE = + new HashMap, Integer>(); + // Special code that means 'not-encoded'; in this case we do old school + // sending of the class name using reflection, etc. + private static final byte NOT_ENCODED = 0; + //Generic array means that the array type is not one of the pre-defined arrays + //in the CLASS_TO_CODE map, but we have to still encode the array since it's + //elements are serializable by this class. + private static final int GENERIC_ARRAY_CODE; + private static final int NEXT_CLASS_CODE; + static { + //////////////////////////////////////////////////////////////////////////// + // WARNING: Please do not insert, remove or swap any line in this static // + // block. Doing so would change or shift all the codes used to serialize // + // objects, which makes backwards compatibility very hard for clients. // + // New codes should always be added at the end. Code removal is // + // discouraged because code is a short now. // + //////////////////////////////////////////////////////////////////////////// + + int code = NOT_ENCODED + 1; + // Primitive types. + addToMap(Boolean.TYPE, code++); + addToMap(Byte.TYPE, code++); + addToMap(Character.TYPE, code++); + addToMap(Short.TYPE, code++); + addToMap(Integer.TYPE, code++); + addToMap(Long.TYPE, code++); + addToMap(Float.TYPE, code++); + addToMap(Double.TYPE, code++); + addToMap(Void.TYPE, code++); + + // Other java types + addToMap(String.class, code++); + addToMap(byte [].class, code++); + addToMap(byte [][].class, code++); + + // Hadoop types + addToMap(Text.class, code++); + addToMap(Writable.class, code++); + addToMap(Writable [].class, code++); + code++; // Removed + addToMap(NullInstance.class, code++); + + // Hbase types + addToMap(HColumnDescriptor.class, code++); + addToMap(HConstants.Modify.class, code++); + + // We used to have a class named HMsg but its been removed. 
Rather than + // just axe it, use following random Integer class -- we just chose any + // class from java.lang -- instead just so codes that follow stay + // in same relative place. + addToMap(Integer.class, code++); + addToMap(Integer[].class, code++); + + //HRegion shouldn't be pushed across the wire. + code++; //addToMap(HRegion.class, code++); + code++; //addToMap(HRegion[].class, code++); + + addToMap(HRegionInfo.class, code++); + addToMap(HRegionInfo[].class, code++); + code++; // Removed + code++; // Removed + addToMap(HTableDescriptor.class, code++); + addToMap(MapWritable.class, code++); + + // + // HBASE-880 + // + addToMap(ClusterStatus.class, code++); + addToMap(Delete.class, code++); + addToMap(Get.class, code++); + addToMap(KeyValue.class, code++); + addToMap(KeyValue[].class, code++); + addToMap(Put.class, code++); + addToMap(Put[].class, code++); + addToMap(Result.class, code++); + addToMap(Result[].class, code++); + addToMap(Scan.class, code++); + + addToMap(WhileMatchFilter.class, code++); + addToMap(PrefixFilter.class, code++); + addToMap(PageFilter.class, code++); + addToMap(InclusiveStopFilter.class, code++); + addToMap(ColumnCountGetFilter.class, code++); + addToMap(SingleColumnValueFilter.class, code++); + addToMap(SingleColumnValueExcludeFilter.class, code++); + addToMap(BinaryComparator.class, code++); + addToMap(BitComparator.class, code++); + addToMap(CompareFilter.class, code++); + addToMap(RowFilter.class, code++); + addToMap(ValueFilter.class, code++); + addToMap(QualifierFilter.class, code++); + addToMap(SkipFilter.class, code++); + addToMap(ByteArrayComparable.class, code++); + addToMap(FirstKeyOnlyFilter.class, code++); + addToMap(DependentColumnFilter.class, code++); + + addToMap(Delete [].class, code++); + + addToMap(HLog.Entry.class, code++); + addToMap(HLog.Entry[].class, code++); + addToMap(HLogKey.class, code++); + + addToMap(List.class, code++); + + addToMap(NavigableSet.class, code++); + addToMap(ColumnPrefixFilter.class, code++); + + // Multi + addToMap(Row.class, code++); + addToMap(Action.class, code++); + addToMap(MultiAction.class, code++); + addToMap(MultiResponse.class, code++); + + // coprocessor execution + // Exec no longer exists --> addToMap(Exec.class, code++); + code++; + addToMap(Increment.class, code++); + + addToMap(KeyOnlyFilter.class, code++); + + // serializable + addToMap(Serializable.class, code++); + + addToMap(RandomRowFilter.class, code++); + + addToMap(CompareOp.class, code++); + + addToMap(ColumnRangeFilter.class, code++); + + // HServerLoad no longer exists; increase code so other classes stay the same. 
+ code++; + //addToMap(HServerLoad.class, code++); + + addToMap(RegionOpeningState.class, code++); + + addToMap(HTableDescriptor[].class, code++); + + addToMap(Append.class, code++); + + addToMap(RowMutations.class, code++); + + addToMap(Message.class, code++); + + //java.lang.reflect.Array is a placeholder for arrays not defined above + GENERIC_ARRAY_CODE = code++; + addToMap(Array.class, GENERIC_ARRAY_CODE); + + addToMap(RpcController.class, code++); + + // make sure that this is the last statement in this static block + NEXT_CLASS_CODE = code; + } + + private Class declaredClass; + private Object instance; + private Configuration conf; + + /** default constructor for writable */ + HbaseObjectWritableFor96Migration() { + super(); + } + + /** + * @param instance + */ + HbaseObjectWritableFor96Migration(Object instance) { + set(instance); + } + + /** + * @param declaredClass + * @param instance + */ + HbaseObjectWritableFor96Migration(Class declaredClass, Object instance) { + this.declaredClass = declaredClass; + this.instance = instance; + } + + /** @return the instance, or null if none. */ + Object get() { return instance; } + + /** @return the class this is meant to be. */ + Class getDeclaredClass() { return declaredClass; } + + /** + * Reset the instance. + * @param instance + */ + void set(Object instance) { + this.declaredClass = instance.getClass(); + this.instance = instance; + } + + /** + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "OW[class=" + declaredClass + ",value=" + instance + "]"; + } + + + public void readFields(DataInput in) throws IOException { + readObject(in, this, this.conf); + } + + public void write(DataOutput out) throws IOException { + writeObject(out, instance, declaredClass, conf); + } + + public long getWritableSize() { + return getWritableSize(instance, declaredClass, conf); + } + + private static class NullInstance extends Configured implements Writable { + Class declaredClass; + /** default constructor for writable */ + @SuppressWarnings("unused") + public NullInstance() { super(null); } + + /** + * @param declaredClass + * @param conf + */ + public NullInstance(Class declaredClass, Configuration conf) { + super(conf); + this.declaredClass = declaredClass; + } + + public void readFields(DataInput in) throws IOException { + this.declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); + } + + public void write(DataOutput out) throws IOException { + writeClassCode(out, this.declaredClass); + } + } + + static Integer getClassCode(final Class c) + throws IOException { + Integer code = CLASS_TO_CODE.get(c); + if (code == null ) { + if (List.class.isAssignableFrom(c)) { + code = CLASS_TO_CODE.get(List.class); + } else if (Writable.class.isAssignableFrom(c)) { + code = CLASS_TO_CODE.get(Writable.class); + } else if (c.isArray()) { + code = CLASS_TO_CODE.get(Array.class); + } else if (Message.class.isAssignableFrom(c)) { + code = CLASS_TO_CODE.get(Message.class); + } else if (Serializable.class.isAssignableFrom(c)){ + code = CLASS_TO_CODE.get(Serializable.class); + } else if (Scan.class.isAssignableFrom(c)) { + code = CLASS_TO_CODE.get(Scan.class); + } + } + return code; + } + + /** + * @return the next object code in the list. Used in testing to verify that additional fields are not added + */ + static int getNextClassCode(){ + return NEXT_CLASS_CODE; + } + + /** + * Write out the code for passed Class. 
+ * @param out + * @param c + * @throws IOException + */ + static void writeClassCode(final DataOutput out, final Class c) + throws IOException { + Integer code = getClassCode(c); + + if (code == null) { + LOG.error("Unsupported type " + c); + StackTraceElement[] els = new Exception().getStackTrace(); + for(StackTraceElement elem : els) { + LOG.error(elem.getMethodName()); + } + throw new UnsupportedOperationException("No code for unexpected " + c); + } + WritableUtils.writeVInt(out, code); + } + + static long getWritableSize(Object instance, Class declaredClass, + Configuration conf) { + return 0L; // no hint is the default. + } + /** + * Write a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * @param out + * @param instance + * @param declaredClass + * @param conf + * @throws IOException + */ + @SuppressWarnings("unchecked") + static void writeObject(DataOutput out, Object instance, + Class declaredClass, + Configuration conf) + throws IOException { + + Object instanceObj = instance; + Class declClass = declaredClass; + + if (instanceObj == null) { // null + instanceObj = new NullInstance(declClass, conf); + declClass = Writable.class; + } + writeClassCode(out, declClass); + if (declClass.isArray()) { // array + // If bytearray, just dump it out -- avoid the recursion and + // byte-at-a-time we were previously doing. + if (declClass.equals(byte [].class)) { + Bytes.writeByteArray(out, (byte [])instanceObj); + } else { + //if it is a Generic array, write the element's type + if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) { + Class componentType = declaredClass.getComponentType(); + writeClass(out, componentType); + } + + int length = Array.getLength(instanceObj); + out.writeInt(length); + for (int i = 0; i < length; i++) { + Object item = Array.get(instanceObj, i); + writeObject(out, item, + item.getClass(), conf); + } + } + } else if (List.class.isAssignableFrom(declClass)) { + List list = (List)instanceObj; + int length = list.size(); + out.writeInt(length); + for (int i = 0; i < length; i++) { + Object elem = list.get(i); + writeObject(out, elem, + elem == null ? 
Writable.class : elem.getClass(), conf); + } + } else if (declClass == String.class) { // String + Text.writeString(out, (String)instanceObj); + } else if (declClass.isPrimitive()) { // primitive type + if (declClass == Boolean.TYPE) { // boolean + out.writeBoolean(((Boolean)instanceObj).booleanValue()); + } else if (declClass == Character.TYPE) { // char + out.writeChar(((Character)instanceObj).charValue()); + } else if (declClass == Byte.TYPE) { // byte + out.writeByte(((Byte)instanceObj).byteValue()); + } else if (declClass == Short.TYPE) { // short + out.writeShort(((Short)instanceObj).shortValue()); + } else if (declClass == Integer.TYPE) { // int + out.writeInt(((Integer)instanceObj).intValue()); + } else if (declClass == Long.TYPE) { // long + out.writeLong(((Long)instanceObj).longValue()); + } else if (declClass == Float.TYPE) { // float + out.writeFloat(((Float)instanceObj).floatValue()); + } else if (declClass == Double.TYPE) { // double + out.writeDouble(((Double)instanceObj).doubleValue()); + } else if (declClass == Void.TYPE) { // void + } else { + throw new IllegalArgumentException("Not a primitive: "+declClass); + } + } else if (declClass.isEnum()) { // enum + Text.writeString(out, ((Enum)instanceObj).name()); + } else if (Message.class.isAssignableFrom(declaredClass)) { + Text.writeString(out, instanceObj.getClass().getName()); + ((Message)instance).writeDelimitedTo( + DataOutputOutputStream.constructOutputStream(out)); + } else if (Writable.class.isAssignableFrom(declClass)) { // Writable + Class c = instanceObj.getClass(); + Integer code = CLASS_TO_CODE.get(c); + if (code == null) { + out.writeByte(NOT_ENCODED); + Text.writeString(out, c.getName()); + } else { + writeClassCode(out, c); + } + ((Writable)instanceObj).write(out); + } else if (Serializable.class.isAssignableFrom(declClass)) { + Class c = instanceObj.getClass(); + Integer code = CLASS_TO_CODE.get(c); + if (code == null) { + out.writeByte(NOT_ENCODED); + Text.writeString(out, c.getName()); + } else { + writeClassCode(out, c); + } + ByteArrayOutputStream bos = null; + ObjectOutputStream oos = null; + try{ + bos = new ByteArrayOutputStream(); + oos = new ObjectOutputStream(bos); + oos.writeObject(instanceObj); + byte[] value = bos.toByteArray(); + out.writeInt(value.length); + out.write(value); + } finally { + if(bos!=null) bos.close(); + if(oos!=null) oos.close(); + } + } else if (Scan.class.isAssignableFrom(declClass)) { + Scan scan = (Scan)instanceObj; + byte [] scanBytes = ProtobufUtil.toScan(scan).toByteArray(); + out.writeInt(scanBytes.length); + out.write(scanBytes); + } else { + throw new IOException("Can't write: "+instanceObj+" as "+declClass); + } + } + + /** Writes the encoded class code as defined in CLASS_TO_CODE, or + * the whole class name if not defined in the mapping. 
+ */ + static void writeClass(DataOutput out, Class c) throws IOException { + Integer code = CLASS_TO_CODE.get(c); + if (code == null) { + WritableUtils.writeVInt(out, NOT_ENCODED); + Text.writeString(out, c.getName()); + } else { + WritableUtils.writeVInt(out, code); + } + } + + /** Reads and returns the class as written by {@link #writeClass(DataOutput, Class)} */ + static Class readClass(Configuration conf, DataInput in) throws IOException { + Class instanceClass = null; + int b = (byte)WritableUtils.readVInt(in); + if (b == NOT_ENCODED) { + String className = Text.readString(in); + try { + instanceClass = getClassByName(conf, className); + } catch (ClassNotFoundException e) { + LOG.error("Can't find class " + className, e); + throw new IOException("Can't find class " + className, e); + } + } else { + instanceClass = CODE_TO_CLASS.get(b); + } + return instanceClass; + } + + /** + * Read a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * @param in + * @param conf + * @return the object + * @throws IOException + */ + static Object readObject(DataInput in, Configuration conf) + throws IOException { + return readObject(in, null, conf); + } + + /** + * Read a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * @param in + * @param objectWritable + * @param conf + * @return the object + * @throws IOException + */ + @SuppressWarnings("unchecked") + static Object readObject(DataInput in, + HbaseObjectWritableFor96Migration objectWritable, Configuration conf) + throws IOException { + Class declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); + Object instance; + if (declaredClass.isPrimitive()) { // primitive types + if (declaredClass == Boolean.TYPE) { // boolean + instance = Boolean.valueOf(in.readBoolean()); + } else if (declaredClass == Character.TYPE) { // char + instance = Character.valueOf(in.readChar()); + } else if (declaredClass == Byte.TYPE) { // byte + instance = Byte.valueOf(in.readByte()); + } else if (declaredClass == Short.TYPE) { // short + instance = Short.valueOf(in.readShort()); + } else if (declaredClass == Integer.TYPE) { // int + instance = Integer.valueOf(in.readInt()); + } else if (declaredClass == Long.TYPE) { // long + instance = Long.valueOf(in.readLong()); + } else if (declaredClass == Float.TYPE) { // float + instance = Float.valueOf(in.readFloat()); + } else if (declaredClass == Double.TYPE) { // double + instance = Double.valueOf(in.readDouble()); + } else if (declaredClass == Void.TYPE) { // void + instance = null; + } else { + throw new IllegalArgumentException("Not a primitive: "+declaredClass); + } + } else if (declaredClass.isArray()) { // array + if (declaredClass.equals(byte [].class)) { + instance = Bytes.readByteArray(in); + } else { + int length = in.readInt(); + instance = Array.newInstance(declaredClass.getComponentType(), length); + for (int i = 0; i < length; i++) { + Array.set(instance, i, readObject(in, conf)); + } + } + } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE + Class componentType = readClass(conf, in); + int length = in.readInt(); + instance = Array.newInstance(componentType, length); + for (int i = 0; i < length; i++) { + Array.set(instance, i, readObject(in, conf)); + } + } else if (List.class.isAssignableFrom(declaredClass)) { // List + int length = in.readInt(); + instance = new ArrayList(length); + for (int i = 0; i < length; i++) { + ((ArrayList)instance).add(readObject(in, conf)); + } + } else if 
(declaredClass == String.class) { // String + instance = Text.readString(in); + } else if (declaredClass.isEnum()) { // enum + instance = Enum.valueOf((Class) declaredClass, + Text.readString(in)); + } else if (declaredClass == Message.class) { + String className = Text.readString(in); + try { + declaredClass = getClassByName(conf, className); + instance = tryInstantiateProtobuf(declaredClass, in); + } catch (ClassNotFoundException e) { + LOG.error("Can't find class " + className, e); + throw new IOException("Can't find class " + className, e); + } + } else if (Scan.class.isAssignableFrom(declaredClass)) { + int length = in.readInt(); + byte [] scanBytes = new byte[length]; + in.readFully(scanBytes); + ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder(); + instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build()); + } else { // Writable or Serializable + Class instanceClass = null; + int b = (byte)WritableUtils.readVInt(in); + if (b == NOT_ENCODED) { + String className = Text.readString(in); + try { + instanceClass = getClassByName(conf, className); + } catch (ClassNotFoundException e) { + LOG.error("Can't find class " + className, e); + throw new IOException("Can't find class " + className, e); + } + } else { + instanceClass = CODE_TO_CLASS.get(b); + } + if(Writable.class.isAssignableFrom(instanceClass)){ + Writable writable = WritableFactories.newInstance(instanceClass, conf); + try { + writable.readFields(in); + } catch (Exception e) { + LOG.error("Error in readFields", e); + throw new IOException("Error in readFields" , e); + } + instance = writable; + if (instanceClass == NullInstance.class) { // null + declaredClass = ((NullInstance)instance).declaredClass; + instance = null; + } + } else { + int length = in.readInt(); + byte[] objectBytes = new byte[length]; + in.readFully(objectBytes); + ByteArrayInputStream bis = null; + ObjectInputStream ois = null; + try { + bis = new ByteArrayInputStream(objectBytes); + ois = new ObjectInputStream(bis); + instance = ois.readObject(); + } catch (ClassNotFoundException e) { + LOG.error("Class not found when attempting to deserialize object", e); + throw new IOException("Class not found when attempting to " + + "deserialize object", e); + } finally { + if(bis!=null) bis.close(); + if(ois!=null) ois.close(); + } + } + } + if (objectWritable != null) { // store values + objectWritable.declaredClass = declaredClass; + objectWritable.instance = instance; + } + return instance; + } + + /** + * Try to instantiate a protocol buffer of the given message class + * from the given input stream. + * + * @param protoClass the class of the generated protocol buffer + * @param dataIn the input stream to read from + * @return the instantiated Message instance + * @throws IOException if an IO problem occurs + */ + static Message tryInstantiateProtobuf( + Class protoClass, + DataInput dataIn) throws IOException { + + try { + if (dataIn instanceof InputStream) { + // We can use the built-in parseDelimitedFrom and not have to re-copy + // the data + Method parseMethod = getStaticProtobufMethod(protoClass, + "parseDelimitedFrom", InputStream.class); + return (Message)parseMethod.invoke(null, (InputStream)dataIn); + } else { + // Have to read it into a buffer first, since protobuf doesn't deal + // with the DataInput interface directly. 
+ + // Read the size delimiter that writeDelimitedTo writes + int size = ProtoUtil.readRawVarint32(dataIn); + if (size < 0) { + throw new IOException("Invalid size: " + size); + } + + byte[] data = new byte[size]; + dataIn.readFully(data); + Method parseMethod = getStaticProtobufMethod(protoClass, + "parseFrom", byte[].class); + return (Message)parseMethod.invoke(null, data); + } + } catch (InvocationTargetException e) { + + if (e.getCause() instanceof IOException) { + throw (IOException)e.getCause(); + } else { + throw new IOException(e.getCause()); + } + } catch (IllegalAccessException iae) { + throw new AssertionError("Could not access parse method in " + + protoClass); + } + } + + static Method getStaticProtobufMethod(Class declaredClass, String method, + Class ... args) { + + try { + return declaredClass.getMethod(method, args); + } catch (Exception e) { + // This is a bug in Hadoop - protobufs should all have this static method + throw new AssertionError("Protocol buffer class " + declaredClass + + " does not have an accessible parseFrom(InputStream) method!"); + } + } + + @SuppressWarnings("unchecked") + private static Class getClassByName(Configuration conf, String className) + throws ClassNotFoundException { + if(conf != null) { + return conf.getClassByName(className); + } + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + if(cl == null) { + cl = HbaseObjectWritableFor96Migration.class.getClassLoader(); + } + return Class.forName(className, true, cl); + } + + private static void addToMap(final Class clazz, final int code) { + CLASS_TO_CODE.put(clazz, code); + CODE_TO_CLASS.put(code, clazz); + } + + public void setConf(Configuration conf) { + this.conf = conf; + } + + public Configuration getConf() { + return this.conf; + } +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java (revision 1426729) @@ -0,0 +1,77 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security.access; + +import java.util.*; + +import org.apache.hadoop.classification.InterfaceAudience; + +/** + * A Static Interface. + * Instead of having this code in the the HbaseMapWritable code, where it + * blocks the possibility of altering the variables and changing their types, + * it is put here in this static interface where the static final Maps are + * loaded one time. Only byte[] and Cell are supported at this time. 
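+ * The nested loader below assigns codes starting from 1, so with the current + * class list a byte[] value is written and read back as code 1.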
+ * @deprecated In place until we come up on 0.96 and then it can be removed + * along with {@link HbaseObjectWritableFor96Migration}; needed to read + * pre-0.96 TablePermissions. + */ +@Deprecated +@InterfaceAudience.Private +interface CodeToClassAndBackFor96Migration { + /** + * Static map that contains mapping from code to class + */ + static final Map<Byte, Class<?>> CODE_TO_CLASS = + new HashMap<Byte, Class<?>>(); + + /** + * Static map that contains mapping from class to code + */ + static final Map<Class<?>, Byte> CLASS_TO_CODE = + new HashMap<Class<?>, Byte>(); + + /** + * Class list for supported classes + */ + Class<?>[] classList = {byte[].class}; + + /** + * The static loader that is used instead of the static constructor in + * HbaseMapWritable. + */ + InternalStaticLoader sl = + new InternalStaticLoader(classList, CODE_TO_CLASS, CLASS_TO_CODE); + + /** + * Class that loads the static maps with their values. + */ + class InternalStaticLoader{ + InternalStaticLoader(Class<?>[] classList, + Map<Byte, Class<?>> CODE_TO_CLASS, Map<Class<?>, Byte> CLASS_TO_CODE){ + byte code = 1; + for(int i=0; i(); private volatile boolean started = false; - // For generated protocol classes which doesn't have VERSION field - private static final Map, Long> - PROTOCOL_VERSION = new HashMap, Long>(); - private static final Map> PROTOCOL_CACHE = new ConcurrentHashMap>(); @@ -263,8 +258,6 @@ protected int highPriorityLevel; // what level a high priority call is at - private volatile int responseQueueLen; // size of response queue for this server - protected final List connectionList = Collections.synchronizedList(new LinkedList()); //maintain a list @@ -1000,7 +993,6 @@ return true; } if (!call.response.hasRemaining()) { - responseQueueLen--; call.connection.decRpcCount(); //noinspection RedundantIfStatement if (numElements == 1) { // last call fully processes.
@@ -1070,7 +1062,6 @@ void doRespond(Call call) throws IOException { // set the serve time when the response has to be sent later call.timestamp = System.currentTimeMillis(); - responseQueueLen++; boolean doRegister = false; synchronized (call.connection.responseQueue) { Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java (working copy) @@ -70,4 +70,4 @@ } return result; } -} +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java (working copy) @@ -18,27 +18,21 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import java.io.IOException; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; -import java.io.IOException; +import com.google.protobuf.ByteString; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; -import static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; - /** * Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s * against the active master. 
An instance of this class may be obtained Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (working copy) @@ -1337,7 +1337,6 @@ * @param exception the relevant exception * @return an exception to throw */ - @SuppressWarnings({"ThrowableInstanceNeverThrown"}) protected IOException wrapException(InetSocketAddress addr, IOException exception) { if (exception instanceof ConnectException) { Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java (working copy) @@ -70,4 +70,4 @@ * @throws IOException */ public void endDelayThrowing(Throwable t) throws IOException; -} +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java (working copy) @@ -36,10 +36,9 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.security.HBasePolicyProvider; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; -import org.apache.hadoop.hbase.util.Bytes; import org.codehaus.jackson.map.ObjectMapper; import com.google.protobuf.Message; Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java (working copy) @@ -24,7 +24,15 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.MasterAdminProtocol; +import org.apache.hadoop.hbase.MasterMonitorProtocol; +import org.apache.hadoop.hbase.RegionServerStatusProtocol; +import org.apache.hadoop.hbase.client.AdminProtocol; +import org.apache.hadoop.hbase.client.ClientProtocol; +import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; +import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.ipc.RemoteException; @@ -34,6 +42,7 @@ import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.InetSocketAddress; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -76,6 +85,21 @@ final private int 
rpcTimeout; private final long clientProtocolVersion; + // For generated protocol classes which don't have VERSION field, + // such as protobuf interfaces. + static final Map, Long> + PROTOCOL_VERSION = new HashMap, Long>(); + static { + PROTOCOL_VERSION.put(ClientService.BlockingInterface.class, + Long.valueOf(ClientProtocol.VERSION)); + PROTOCOL_VERSION.put(AdminService.BlockingInterface.class, + Long.valueOf(AdminProtocol.VERSION)); + PROTOCOL_VERSION.put(RegionServerStatusService.BlockingInterface.class, + Long.valueOf(RegionServerStatusProtocol.VERSION)); + PROTOCOL_VERSION.put(MasterMonitorProtocol.class,Long.valueOf(MasterMonitorProtocol.VERSION)); + PROTOCOL_VERSION.put(MasterAdminProtocol.class,Long.valueOf(MasterAdminProtocol.VERSION)); + } + public Invoker(Class protocol, InetSocketAddress addr, User ticket, Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException { @@ -84,7 +108,7 @@ this.ticket = ticket; this.client = CLIENTS.getClient(conf, factory); this.rpcTimeout = rpcTimeout; - Long version = Invocation.PROTOCOL_VERSION.get(protocol); + Long version = PROTOCOL_VERSION.get(protocol); if (version != null) { this.clientProtocolVersion = version; } else { Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java (working copy) @@ -19,14 +19,11 @@ package org.apache.hadoop.hbase.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; @InterfaceAudience.Private public class MetricsHBaseServer { - private static Log LOG = LogFactory.getLog(MetricsHBaseServer.class); private MetricsHBaseServerSource source; public MetricsHBaseServer(String serverName, MetricsHBaseServerWrapper wrapper) { @@ -69,4 +66,4 @@ public MetricsHBaseServerSource getMetricsSource() { return source; } -} +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java (working copy) @@ -24,6 +24,7 @@ /** * An error requesting an RPC protocol that the server is not serving. */ +@SuppressWarnings("serial") public class UnknownProtocolException extends DoNotRetryIOException { private Class protocol; @@ -44,4 +45,4 @@ public Class getProtocol() { return protocol; } -} +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java (working copy) @@ -27,8 +27,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; -import org.apache.hadoop.io.Writable; /** * Cache a client using its socket factory as the hash key. 
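Aside: a minimal sketch (not part of the patch) of the lookup pattern the ProtobufRpcClientEngine.Invoker hunk above adopts after the version table moves out of the deleted Invocation class: protobuf-generated interfaces carry no VERSION field, so their versions come from a static map, and anything else falls back to reading a VERSION constant reflectively. Class and method names below are invented for the example.

import java.util.HashMap;
import java.util.Map;

class ProtocolVersionLookup {
  // Consulted first, mirroring the PROTOCOL_VERSION map in the patch.
  static final Map<Class<?>, Long> PROTOCOL_VERSION = new HashMap<Class<?>, Long>();

  // Resolve a protocol's version: map first, then a public static VERSION field.
  static long versionOf(Class<?> protocol)
      throws NoSuchFieldException, IllegalAccessException {
    Long version = PROTOCOL_VERSION.get(protocol);
    if (version != null) {
      return version.longValue();
    }
    return protocol.getField("VERSION").getLong(null);
  }
}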
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java (working copy) @@ -25,11 +25,8 @@ * but is only used for logging on the server side, etc. */ public class CallerDisconnectedException extends IOException { + private static final long serialVersionUID = 1L; public CallerDisconnectedException(String msg) { super(msg); } - - private static final long serialVersionUID = 1L; - - } Index: hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java (working copy) @@ -1,219 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.ipc; - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.AdminProtocol; -import org.apache.hadoop.hbase.client.ClientProtocol; -import org.apache.hadoop.hbase.MasterMonitorProtocol; -import org.apache.hadoop.hbase.MasterAdminProtocol; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; -import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; -import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService; -import org.apache.hadoop.hbase.RegionServerStatusProtocol; -import org.apache.hadoop.io.VersionMismatchException; -import org.apache.hadoop.io.VersionedWritable; - -/** A method invocation, including the method name and its parameters.*/ -@InterfaceAudience.Private -public class Invocation extends VersionedWritable implements Configurable { - protected String methodName; - @SuppressWarnings("rawtypes") - protected Class[] parameterClasses; - protected Object[] parameters; - protected Configuration conf; - private long clientVersion; - private int clientMethodsHash; - - // For generated protocol classes which don't have VERSION field, - // such as protobuf interfaces. 
- static final Map, Long> - PROTOCOL_VERSION = new HashMap, Long>(); - - static { - PROTOCOL_VERSION.put(ClientService.BlockingInterface.class, - Long.valueOf(ClientProtocol.VERSION)); - PROTOCOL_VERSION.put(AdminService.BlockingInterface.class, - Long.valueOf(AdminProtocol.VERSION)); - PROTOCOL_VERSION.put(RegionServerStatusService.BlockingInterface.class, - Long.valueOf(RegionServerStatusProtocol.VERSION)); - PROTOCOL_VERSION.put(MasterMonitorProtocol.class,Long.valueOf(MasterMonitorProtocol.VERSION)); - PROTOCOL_VERSION.put(MasterAdminProtocol.class,Long.valueOf(MasterAdminProtocol.VERSION)); - } - - // For protobuf protocols, which use ServiceException, instead of IOException - protected static final Set> - PROTOBUF_PROTOCOLS = new HashSet>(); - - static { - PROTOBUF_PROTOCOLS.add(ClientProtocol.class); - PROTOBUF_PROTOCOLS.add(AdminProtocol.class); - PROTOBUF_PROTOCOLS.add(RegionServerStatusProtocol.class); - PROTOBUF_PROTOCOLS.add(MasterMonitorProtocol.class); - PROTOBUF_PROTOCOLS.add(MasterAdminProtocol.class); - } - - private static byte RPC_VERSION = 1; - - public Invocation() {} - - public Invocation(Method method, Object[] parameters) { - this.methodName = method.getName(); - this.parameterClasses = method.getParameterTypes(); - this.parameters = parameters; - Class declaringClass = method.getDeclaringClass(); - if (declaringClass.equals(VersionedProtocol.class)) { - //VersionedProtocol is exempted from version check. - clientVersion = 0; - clientMethodsHash = 0; - } else { - try { - Long version = PROTOCOL_VERSION.get(declaringClass); - if (version != null) { - this.clientVersion = version.longValue(); - } else { - Field versionField = declaringClass.getField("VERSION"); - versionField.setAccessible(true); - this.clientVersion = versionField.getLong(declaringClass); - } - } catch (NoSuchFieldException ex) { - throw new RuntimeException("The " + declaringClass, ex); - } catch (IllegalAccessException ex) { - throw new RuntimeException(ex); - } - this.clientMethodsHash = ProtocolSignature.getFingerprint( - declaringClass.getMethods()); - } - } - - /** @return The name of the method invoked. */ - public String getMethodName() { return methodName; } - - /** @return The parameter classes. */ - @SuppressWarnings({ "rawtypes" }) - public Class[] getParameterClasses() { return parameterClasses; } - - /** @return The parameter instances. */ - public Object[] getParameters() { return parameters; } - - long getProtocolVersion() { - return clientVersion; - } - - protected int getClientMethodsHash() { - return clientMethodsHash; - } - - /** - * Returns the rpc version used by the client. - * @return rpcVersion - */ - public long getRpcVersion() { - return RPC_VERSION; - } - - public void readFields(DataInput in) throws IOException { - try { - super.readFields(in); - methodName = in.readUTF(); - clientVersion = in.readLong(); - clientMethodsHash = in.readInt(); - } catch (VersionMismatchException e) { - // VersionMismatchException doesn't provide an API to access - // expectedVersion and foundVersion. This is really sad. - if (e.toString().endsWith("found v0")) { - // Try to be a bit backwards compatible. In previous versions of - // HBase (before HBASE-3939 in 0.92) Invocation wasn't a - // VersionedWritable and thus the first thing on the wire was always - // the 2-byte length of the method name. 
Because no method name is - // longer than 255 characters, and all method names are in ASCII, - // The following code is equivalent to `in.readUTF()', which we can't - // call again here, because `super.readFields(in)' already consumed - // the first byte of input, which can't be "unread" back into `in'. - final short len = (short) (in.readByte() & 0xFF); // Unsigned byte. - final byte[] buf = new byte[len]; - in.readFully(buf, 0, len); - methodName = new String(buf); - } - } - parameters = new Object[in.readInt()]; - parameterClasses = new Class[parameters.length]; - HbaseObjectWritable objectWritable = new HbaseObjectWritable(); - for (int i = 0; i < parameters.length; i++) { - parameters[i] = HbaseObjectWritable.readObject(in, objectWritable, - this.conf); - parameterClasses[i] = objectWritable.getDeclaredClass(); - } - } - - public void write(DataOutput out) throws IOException { - super.write(out); - out.writeUTF(this.methodName); - out.writeLong(clientVersion); - out.writeInt(clientMethodsHash); - out.writeInt(parameterClasses.length); - for (int i = 0; i < parameterClasses.length; i++) { - HbaseObjectWritable.writeObject(out, parameters[i], parameterClasses[i], - conf); - } - } - - @Override - public String toString() { - StringBuilder buffer = new StringBuilder(256); - buffer.append(methodName); - buffer.append("("); - for (int i = 0; i < parameters.length; i++) { - if (i != 0) - buffer.append(", "); - buffer.append(parameters[i]); - } - buffer.append(")"); - buffer.append(", rpc version="+RPC_VERSION); - buffer.append(", client version="+clientVersion); - buffer.append(", methodsFingerPrint="+clientMethodsHash); - return buffer.toString(); - } - - public void setConf(Configuration conf) { - this.conf = conf; - } - - public Configuration getConf() { - return this.conf; - } - - @Override - public byte getVersion() { - return RPC_VERSION; - } -} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java (working copy) @@ -148,6 +148,4 @@ public String toString() { return "[" + minimumTimestamp + "," + maximumTimestamp + "]"; } - } - Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (working copy) @@ -101,11 +101,9 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.hfile.CacheConfig; -import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler; import org.apache.hadoop.hbase.ipc.HBaseClientRPC; import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler; import org.apache.hadoop.hbase.ipc.HBaseServerRPC; -import org.apache.hadoop.hbase.ipc.MetricsHBaseServer; import org.apache.hadoop.hbase.ipc.ProtocolSignature; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; @@ -163,7 +161,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor; -import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; @@ -3191,7 +3188,7 @@ for (ClientProtos.MultiAction actionUnion : request.getActionList()) { requestCount.increment(); try { - Object result = null; + ClientProtos.Result result = null; if (actionUnion.hasGet()) { Get get = ProtobufUtil.toGet(actionUnion.getGet()); Integer lock = getLockFromId(get.getLockId()); @@ -3242,8 +3239,7 @@ } else { resultBuilder.clear(); } - NameBytesPair value = ProtobufUtil.toParameter(result); - resultBuilder.setValue(value); + resultBuilder.setValue(result); builder.addResult(resultBuilder.build()); } } catch (IOException ie) { @@ -3757,10 +3753,8 @@ boolean batchContainsPuts = false, batchContainsDelete = false; try { ActionResult.Builder resultBuilder = ActionResult.newBuilder(); - NameBytesPair value = ProtobufUtil.toParameter(ClientProtos.Result.newBuilder().build()); - resultBuilder.setValue(value); + resultBuilder.setValue(ClientProtos.Result.newBuilder().build()); ActionResult result = resultBuilder.build(); - int i = 0; for (Mutate m : mutates) { Mutation mutation = null; Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java (working copy) @@ -19,12 +19,8 @@ package org.apache.hadoop.hbase.io; -import java.io.DataInput; -import java.io.DataOutput; import java.io.IOException; -import org.apache.hadoop.io.Writable; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; @@ -39,7 +35,7 @@ */ @InterfaceAudience.Public @InterfaceStability.Stable -public class TimeRange implements Writable { +public class TimeRange { private long minStamp = 0L; private long maxStamp = Long.MAX_VALUE; private boolean allTime = false; @@ -184,17 +180,4 @@ sb.append(this.minStamp); return sb.toString(); } - - //Writable - public void readFields(final DataInput in) throws IOException { - this.minStamp = in.readLong(); - this.maxStamp = in.readLong(); - this.allTime = in.readBoolean(); - } - - public void write(final DataOutput out) throws IOException { - out.writeLong(minStamp); - out.writeLong(maxStamp); - out.writeBoolean(this.allTime); - } -} +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java (working copy) @@ -21,7 +21,6 @@ import java.io.BufferedInputStream; import java.io.DataInput; import java.io.DataInputStream; -import java.io.DataOutput; import java.io.IOException; import java.io.InputStream; @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FSProtos; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.Writable; import com.google.protobuf.ByteString; @@ -56,7 +54,7 @@ 
* references. References are cleaned up by compactions. */ @InterfaceAudience.Private -public class Reference implements Writable { +public class Reference { private byte [] splitkey; private Range region; @@ -99,7 +97,6 @@ /** * Used by serializations. - * @deprecated Use the pb serializations instead. Writables are going away. */ @Deprecated // Make this private when it comes time to let go of this constructor. Needed by pb serialization. @@ -130,18 +127,14 @@ return "" + this.region; } - /** - * @deprecated Writables are going away. Use the pb serialization methods instead. - */ - @Deprecated - public void write(DataOutput out) throws IOException { - // Write true if we're doing top of the file. - out.writeBoolean(isTopFileRegion(this.region)); - Bytes.writeByteArray(out, this.splitkey); + public static boolean isTopFileRegion(final Range r) { + return r.equals(Range.top); } /** * @deprecated Writables are going away. Use the pb serialization methods instead. + * Remove in a release after 0.96 goes out. This is here only to migrate + * old Reference files written with Writables before 0.96. */ @Deprecated public void readFields(DataInput in) throws IOException { @@ -151,10 +144,6 @@ this.splitkey = Bytes.readByteArray(in); } - public static boolean isTopFileRegion(final Range r) { - return r.equals(Range.top); - } - public Path write(final FileSystem fs, final Path p) throws IOException { FSDataOutputStream out = fs.create(p, false); Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (working copy) @@ -1,806 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hbase.io; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.InputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.lang.reflect.Array; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NavigableSet; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.hbase.ClusterStatus; -import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.client.Action; -import org.apache.hadoop.hbase.client.Append; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Increment; -import org.apache.hadoop.hbase.client.MultiAction; -import org.apache.hadoop.hbase.client.MultiResponse; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.Row; -import org.apache.hadoop.hbase.client.RowMutations; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.BitComparator; -import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; -import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; -import org.apache.hadoop.hbase.filter.ColumnRangeFilter; -import org.apache.hadoop.hbase.filter.CompareFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.filter.DependentColumnFilter; -import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; -import org.apache.hadoop.hbase.filter.InclusiveStopFilter; -import org.apache.hadoop.hbase.filter.KeyOnlyFilter; -import org.apache.hadoop.hbase.filter.PageFilter; -import org.apache.hadoop.hbase.filter.PrefixFilter; -import org.apache.hadoop.hbase.filter.QualifierFilter; -import org.apache.hadoop.hbase.filter.RandomRowFilter; -import org.apache.hadoop.hbase.filter.RowFilter; -import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; -import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; -import org.apache.hadoop.hbase.filter.SkipFilter; -import org.apache.hadoop.hbase.filter.ValueFilter; -import org.apache.hadoop.hbase.filter.WhileMatchFilter; -import org.apache.hadoop.hbase.filter.ByteArrayComparable; -import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.RegionOpeningState; -import org.apache.hadoop.hbase.regionserver.wal.HLog; -import org.apache.hadoop.hbase.regionserver.wal.HLogKey; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.ProtoUtil; -import org.apache.hadoop.io.MapWritable; -import org.apache.hadoop.io.ObjectWritable; -import 
org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.io.WritableFactories; -import org.apache.hadoop.io.WritableUtils; - -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; - -/** - * This is a customized version of the polymorphic hadoop - * {@link ObjectWritable}. It removes UTF8 (HADOOP-414). - * Using {@link Text} intead of UTF-8 saves ~2% CPU between reading and writing - * objects running a short sequentialWrite Performance Evaluation test just in - * ObjectWritable alone; more when we're doing randomRead-ing. Other - * optimizations include our passing codes for classes instead of the - * actual class names themselves. This makes it so this class needs amendment - * if non-Writable classes are introduced -- if passed a Writable for which we - * have no code, we just do the old-school passing of the class name, etc. -- - * but passing codes the savings are large particularly when cell - * data is small (If < a couple of kilobytes, the encoding/decoding of class - * name and reflection to instantiate class was costing in excess of the cell - * handling). - */ -@InterfaceAudience.Private -public class HbaseObjectWritable implements Writable, WritableWithSize, Configurable { - protected final static Log LOG = LogFactory.getLog(HbaseObjectWritable.class); - - // Here we maintain two static maps of classes to code and vice versa. - // Add new classes+codes as wanted or figure way to auto-generate these - // maps. - static final Map> CODE_TO_CLASS = - new HashMap>(); - static final Map, Integer> CLASS_TO_CODE = - new HashMap, Integer>(); - // Special code that means 'not-encoded'; in this case we do old school - // sending of the class name using reflection, etc. - private static final byte NOT_ENCODED = 0; - //Generic array means that the array type is not one of the pre-defined arrays - //in the CLASS_TO_CODE map, but we have to still encode the array since it's - //elements are serializable by this class. - private static final int GENERIC_ARRAY_CODE; - private static final int NEXT_CLASS_CODE; - static { - //////////////////////////////////////////////////////////////////////////// - // WARNING: Please do not insert, remove or swap any line in this static // - // block. Doing so would change or shift all the codes used to serialize // - // objects, which makes backwards compatibility very hard for clients. // - // New codes should always be added at the end. Code removal is // - // discouraged because code is a short now. // - //////////////////////////////////////////////////////////////////////////// - - int code = NOT_ENCODED + 1; - // Primitive types. - addToMap(Boolean.TYPE, code++); - addToMap(Byte.TYPE, code++); - addToMap(Character.TYPE, code++); - addToMap(Short.TYPE, code++); - addToMap(Integer.TYPE, code++); - addToMap(Long.TYPE, code++); - addToMap(Float.TYPE, code++); - addToMap(Double.TYPE, code++); - addToMap(Void.TYPE, code++); - - // Other java types - addToMap(String.class, code++); - addToMap(byte [].class, code++); - addToMap(byte [][].class, code++); - - // Hadoop types - addToMap(Text.class, code++); - addToMap(Writable.class, code++); - addToMap(Writable [].class, code++); - code++; // Removed - addToMap(NullInstance.class, code++); - - // Hbase types - addToMap(HColumnDescriptor.class, code++); - addToMap(HConstants.Modify.class, code++); - - // We used to have a class named HMsg but its been removed. 
Rather than - // just axe it, use following random Integer class -- we just chose any - // class from java.lang -- instead just so codes that follow stay - // in same relative place. - addToMap(Integer.class, code++); - addToMap(Integer[].class, code++); - - //HRegion shouldn't be pushed across the wire. - code++; //addToMap(HRegion.class, code++); - code++; //addToMap(HRegion[].class, code++); - - addToMap(HRegionInfo.class, code++); - addToMap(HRegionInfo[].class, code++); - code++; // Removed - code++; // Removed - addToMap(HTableDescriptor.class, code++); - addToMap(MapWritable.class, code++); - - // - // HBASE-880 - // - addToMap(ClusterStatus.class, code++); - addToMap(Delete.class, code++); - addToMap(Get.class, code++); - addToMap(KeyValue.class, code++); - addToMap(KeyValue[].class, code++); - addToMap(Put.class, code++); - addToMap(Put[].class, code++); - addToMap(Result.class, code++); - addToMap(Result[].class, code++); - addToMap(Scan.class, code++); - - addToMap(WhileMatchFilter.class, code++); - addToMap(PrefixFilter.class, code++); - addToMap(PageFilter.class, code++); - addToMap(InclusiveStopFilter.class, code++); - addToMap(ColumnCountGetFilter.class, code++); - addToMap(SingleColumnValueFilter.class, code++); - addToMap(SingleColumnValueExcludeFilter.class, code++); - addToMap(BinaryComparator.class, code++); - addToMap(BitComparator.class, code++); - addToMap(CompareFilter.class, code++); - addToMap(RowFilter.class, code++); - addToMap(ValueFilter.class, code++); - addToMap(QualifierFilter.class, code++); - addToMap(SkipFilter.class, code++); - addToMap(ByteArrayComparable.class, code++); - addToMap(FirstKeyOnlyFilter.class, code++); - addToMap(DependentColumnFilter.class, code++); - - addToMap(Delete [].class, code++); - - addToMap(HLog.Entry.class, code++); - addToMap(HLog.Entry[].class, code++); - addToMap(HLogKey.class, code++); - - addToMap(List.class, code++); - - addToMap(NavigableSet.class, code++); - addToMap(ColumnPrefixFilter.class, code++); - - // Multi - addToMap(Row.class, code++); - addToMap(Action.class, code++); - addToMap(MultiAction.class, code++); - addToMap(MultiResponse.class, code++); - - // coprocessor execution - // Exec no longer exists --> addToMap(Exec.class, code++); - code++; - addToMap(Increment.class, code++); - - addToMap(KeyOnlyFilter.class, code++); - - // serializable - addToMap(Serializable.class, code++); - - addToMap(RandomRowFilter.class, code++); - - addToMap(CompareOp.class, code++); - - addToMap(ColumnRangeFilter.class, code++); - - // HServerLoad no longer exists; increase code so other classes stay the same. 
- code++; - //addToMap(HServerLoad.class, code++); - - addToMap(RegionOpeningState.class, code++); - - addToMap(HTableDescriptor[].class, code++); - - addToMap(Append.class, code++); - - addToMap(RowMutations.class, code++); - - addToMap(Message.class, code++); - - //java.lang.reflect.Array is a placeholder for arrays not defined above - GENERIC_ARRAY_CODE = code++; - addToMap(Array.class, GENERIC_ARRAY_CODE); - - addToMap(RpcController.class, code++); - - // make sure that this is the last statement in this static block - NEXT_CLASS_CODE = code; - } - - private Class declaredClass; - private Object instance; - private Configuration conf; - - /** default constructor for writable */ - public HbaseObjectWritable() { - super(); - } - - /** - * @param instance - */ - public HbaseObjectWritable(Object instance) { - set(instance); - } - - /** - * @param declaredClass - * @param instance - */ - public HbaseObjectWritable(Class declaredClass, Object instance) { - this.declaredClass = declaredClass; - this.instance = instance; - } - - /** @return the instance, or null if none. */ - public Object get() { return instance; } - - /** @return the class this is meant to be. */ - public Class getDeclaredClass() { return declaredClass; } - - /** - * Reset the instance. - * @param instance - */ - public void set(Object instance) { - this.declaredClass = instance.getClass(); - this.instance = instance; - } - - /** - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "OW[class=" + declaredClass + ",value=" + instance + "]"; - } - - - public void readFields(DataInput in) throws IOException { - readObject(in, this, this.conf); - } - - public void write(DataOutput out) throws IOException { - writeObject(out, instance, declaredClass, conf); - } - - public long getWritableSize() { - return getWritableSize(instance, declaredClass, conf); - } - - private static class NullInstance extends Configured implements Writable { - Class declaredClass; - /** default constructor for writable */ - @SuppressWarnings("unused") - public NullInstance() { super(null); } - - /** - * @param declaredClass - * @param conf - */ - public NullInstance(Class declaredClass, Configuration conf) { - super(conf); - this.declaredClass = declaredClass; - } - - public void readFields(DataInput in) throws IOException { - this.declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); - } - - public void write(DataOutput out) throws IOException { - writeClassCode(out, this.declaredClass); - } - } - - public static Integer getClassCode(final Class c) - throws IOException { - Integer code = CLASS_TO_CODE.get(c); - if (code == null ) { - if (List.class.isAssignableFrom(c)) { - code = CLASS_TO_CODE.get(List.class); - } else if (Writable.class.isAssignableFrom(c)) { - code = CLASS_TO_CODE.get(Writable.class); - } else if (c.isArray()) { - code = CLASS_TO_CODE.get(Array.class); - } else if (Message.class.isAssignableFrom(c)) { - code = CLASS_TO_CODE.get(Message.class); - } else if (Serializable.class.isAssignableFrom(c)){ - code = CLASS_TO_CODE.get(Serializable.class); - } else if (Scan.class.isAssignableFrom(c)) { - code = CLASS_TO_CODE.get(Scan.class); - } - } - return code; - } - - /** - * @return the next object code in the list. Used in testing to verify that additional fields are not added - */ - static int getNextClassCode(){ - return NEXT_CLASS_CODE; - } - - /** - * Write out the code for passed Class. 
- * @param out - * @param c - * @throws IOException - */ - static void writeClassCode(final DataOutput out, final Class c) - throws IOException { - Integer code = getClassCode(c); - - if (code == null) { - LOG.error("Unsupported type " + c); - StackTraceElement[] els = new Exception().getStackTrace(); - for(StackTraceElement elem : els) { - LOG.error(elem.getMethodName()); - } - throw new UnsupportedOperationException("No code for unexpected " + c); - } - WritableUtils.writeVInt(out, code); - } - - public static long getWritableSize(Object instance, Class declaredClass, - Configuration conf) { - return 0L; // no hint is the default. - } - /** - * Write a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. - * @param out - * @param instance - * @param declaredClass - * @param conf - * @throws IOException - */ - @SuppressWarnings("unchecked") - public static void writeObject(DataOutput out, Object instance, - Class declaredClass, - Configuration conf) - throws IOException { - - Object instanceObj = instance; - Class declClass = declaredClass; - - if (instanceObj == null) { // null - instanceObj = new NullInstance(declClass, conf); - declClass = Writable.class; - } - writeClassCode(out, declClass); - if (declClass.isArray()) { // array - // If bytearray, just dump it out -- avoid the recursion and - // byte-at-a-time we were previously doing. - if (declClass.equals(byte [].class)) { - Bytes.writeByteArray(out, (byte [])instanceObj); - } else { - //if it is a Generic array, write the element's type - if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) { - Class componentType = declaredClass.getComponentType(); - writeClass(out, componentType); - } - - int length = Array.getLength(instanceObj); - out.writeInt(length); - for (int i = 0; i < length; i++) { - Object item = Array.get(instanceObj, i); - writeObject(out, item, - item.getClass(), conf); - } - } - } else if (List.class.isAssignableFrom(declClass)) { - List list = (List)instanceObj; - int length = list.size(); - out.writeInt(length); - for (int i = 0; i < length; i++) { - Object elem = list.get(i); - writeObject(out, elem, - elem == null ? 
Writable.class : elem.getClass(), conf); - } - } else if (declClass == String.class) { // String - Text.writeString(out, (String)instanceObj); - } else if (declClass.isPrimitive()) { // primitive type - if (declClass == Boolean.TYPE) { // boolean - out.writeBoolean(((Boolean)instanceObj).booleanValue()); - } else if (declClass == Character.TYPE) { // char - out.writeChar(((Character)instanceObj).charValue()); - } else if (declClass == Byte.TYPE) { // byte - out.writeByte(((Byte)instanceObj).byteValue()); - } else if (declClass == Short.TYPE) { // short - out.writeShort(((Short)instanceObj).shortValue()); - } else if (declClass == Integer.TYPE) { // int - out.writeInt(((Integer)instanceObj).intValue()); - } else if (declClass == Long.TYPE) { // long - out.writeLong(((Long)instanceObj).longValue()); - } else if (declClass == Float.TYPE) { // float - out.writeFloat(((Float)instanceObj).floatValue()); - } else if (declClass == Double.TYPE) { // double - out.writeDouble(((Double)instanceObj).doubleValue()); - } else if (declClass == Void.TYPE) { // void - } else { - throw new IllegalArgumentException("Not a primitive: "+declClass); - } - } else if (declClass.isEnum()) { // enum - Text.writeString(out, ((Enum)instanceObj).name()); - } else if (Message.class.isAssignableFrom(declaredClass)) { - Text.writeString(out, instanceObj.getClass().getName()); - ((Message)instance).writeDelimitedTo( - DataOutputOutputStream.constructOutputStream(out)); - } else if (Writable.class.isAssignableFrom(declClass)) { // Writable - Class c = instanceObj.getClass(); - Integer code = CLASS_TO_CODE.get(c); - if (code == null) { - out.writeByte(NOT_ENCODED); - Text.writeString(out, c.getName()); - } else { - writeClassCode(out, c); - } - ((Writable)instanceObj).write(out); - } else if (Serializable.class.isAssignableFrom(declClass)) { - Class c = instanceObj.getClass(); - Integer code = CLASS_TO_CODE.get(c); - if (code == null) { - out.writeByte(NOT_ENCODED); - Text.writeString(out, c.getName()); - } else { - writeClassCode(out, c); - } - ByteArrayOutputStream bos = null; - ObjectOutputStream oos = null; - try{ - bos = new ByteArrayOutputStream(); - oos = new ObjectOutputStream(bos); - oos.writeObject(instanceObj); - byte[] value = bos.toByteArray(); - out.writeInt(value.length); - out.write(value); - } finally { - if(bos!=null) bos.close(); - if(oos!=null) oos.close(); - } - } else if (Scan.class.isAssignableFrom(declClass)) { - Scan scan = (Scan)instanceObj; - byte [] scanBytes = ProtobufUtil.toScan(scan).toByteArray(); - out.writeInt(scanBytes.length); - out.write(scanBytes); - } else { - throw new IOException("Can't write: "+instanceObj+" as "+declClass); - } - } - - /** Writes the encoded class code as defined in CLASS_TO_CODE, or - * the whole class name if not defined in the mapping. 
- */ - static void writeClass(DataOutput out, Class c) throws IOException { - Integer code = CLASS_TO_CODE.get(c); - if (code == null) { - WritableUtils.writeVInt(out, NOT_ENCODED); - Text.writeString(out, c.getName()); - } else { - WritableUtils.writeVInt(out, code); - } - } - - /** Reads and returns the class as written by {@link #writeClass(DataOutput, Class)} */ - static Class readClass(Configuration conf, DataInput in) throws IOException { - Class instanceClass = null; - int b = (byte)WritableUtils.readVInt(in); - if (b == NOT_ENCODED) { - String className = Text.readString(in); - try { - instanceClass = getClassByName(conf, className); - } catch (ClassNotFoundException e) { - LOG.error("Can't find class " + className, e); - throw new IOException("Can't find class " + className, e); - } - } else { - instanceClass = CODE_TO_CLASS.get(b); - } - return instanceClass; - } - - /** - * Read a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. - * @param in - * @param conf - * @return the object - * @throws IOException - */ - public static Object readObject(DataInput in, Configuration conf) - throws IOException { - return readObject(in, null, conf); - } - - /** - * Read a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. - * @param in - * @param objectWritable - * @param conf - * @return the object - * @throws IOException - */ - @SuppressWarnings("unchecked") - public static Object readObject(DataInput in, - HbaseObjectWritable objectWritable, Configuration conf) - throws IOException { - Class declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); - Object instance; - if (declaredClass.isPrimitive()) { // primitive types - if (declaredClass == Boolean.TYPE) { // boolean - instance = Boolean.valueOf(in.readBoolean()); - } else if (declaredClass == Character.TYPE) { // char - instance = Character.valueOf(in.readChar()); - } else if (declaredClass == Byte.TYPE) { // byte - instance = Byte.valueOf(in.readByte()); - } else if (declaredClass == Short.TYPE) { // short - instance = Short.valueOf(in.readShort()); - } else if (declaredClass == Integer.TYPE) { // int - instance = Integer.valueOf(in.readInt()); - } else if (declaredClass == Long.TYPE) { // long - instance = Long.valueOf(in.readLong()); - } else if (declaredClass == Float.TYPE) { // float - instance = Float.valueOf(in.readFloat()); - } else if (declaredClass == Double.TYPE) { // double - instance = Double.valueOf(in.readDouble()); - } else if (declaredClass == Void.TYPE) { // void - instance = null; - } else { - throw new IllegalArgumentException("Not a primitive: "+declaredClass); - } - } else if (declaredClass.isArray()) { // array - if (declaredClass.equals(byte [].class)) { - instance = Bytes.readByteArray(in); - } else { - int length = in.readInt(); - instance = Array.newInstance(declaredClass.getComponentType(), length); - for (int i = 0; i < length; i++) { - Array.set(instance, i, readObject(in, conf)); - } - } - } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE - Class componentType = readClass(conf, in); - int length = in.readInt(); - instance = Array.newInstance(componentType, length); - for (int i = 0; i < length; i++) { - Array.set(instance, i, readObject(in, conf)); - } - } else if (List.class.isAssignableFrom(declaredClass)) { // List - int length = in.readInt(); - instance = new ArrayList(length); - for (int i = 0; i < length; i++) { - ((ArrayList)instance).add(readObject(in, conf)); - } - } else if 
(declaredClass == String.class) { // String - instance = Text.readString(in); - } else if (declaredClass.isEnum()) { // enum - instance = Enum.valueOf((Class) declaredClass, - Text.readString(in)); - } else if (declaredClass == Message.class) { - String className = Text.readString(in); - try { - declaredClass = getClassByName(conf, className); - instance = tryInstantiateProtobuf(declaredClass, in); - } catch (ClassNotFoundException e) { - LOG.error("Can't find class " + className, e); - throw new IOException("Can't find class " + className, e); - } - } else if (Scan.class.isAssignableFrom(declaredClass)) { - int length = in.readInt(); - byte [] scanBytes = new byte[length]; - in.readFully(scanBytes); - ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder(); - instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build()); - } else { // Writable or Serializable - Class instanceClass = null; - int b = (byte)WritableUtils.readVInt(in); - if (b == NOT_ENCODED) { - String className = Text.readString(in); - try { - instanceClass = getClassByName(conf, className); - } catch (ClassNotFoundException e) { - LOG.error("Can't find class " + className, e); - throw new IOException("Can't find class " + className, e); - } - } else { - instanceClass = CODE_TO_CLASS.get(b); - } - if(Writable.class.isAssignableFrom(instanceClass)){ - Writable writable = WritableFactories.newInstance(instanceClass, conf); - try { - writable.readFields(in); - } catch (Exception e) { - LOG.error("Error in readFields", e); - throw new IOException("Error in readFields" , e); - } - instance = writable; - if (instanceClass == NullInstance.class) { // null - declaredClass = ((NullInstance)instance).declaredClass; - instance = null; - } - } else { - int length = in.readInt(); - byte[] objectBytes = new byte[length]; - in.readFully(objectBytes); - ByteArrayInputStream bis = null; - ObjectInputStream ois = null; - try { - bis = new ByteArrayInputStream(objectBytes); - ois = new ObjectInputStream(bis); - instance = ois.readObject(); - } catch (ClassNotFoundException e) { - LOG.error("Class not found when attempting to deserialize object", e); - throw new IOException("Class not found when attempting to " + - "deserialize object", e); - } finally { - if(bis!=null) bis.close(); - if(ois!=null) ois.close(); - } - } - } - if (objectWritable != null) { // store values - objectWritable.declaredClass = declaredClass; - objectWritable.instance = instance; - } - return instance; - } - - /** - * Try to instantiate a protocol buffer of the given message class - * from the given input stream. - * - * @param protoClass the class of the generated protocol buffer - * @param dataIn the input stream to read from - * @return the instantiated Message instance - * @throws IOException if an IO problem occurs - */ - public static Message tryInstantiateProtobuf( - Class protoClass, - DataInput dataIn) throws IOException { - - try { - if (dataIn instanceof InputStream) { - // We can use the built-in parseDelimitedFrom and not have to re-copy - // the data - Method parseMethod = getStaticProtobufMethod(protoClass, - "parseDelimitedFrom", InputStream.class); - return (Message)parseMethod.invoke(null, (InputStream)dataIn); - } else { - // Have to read it into a buffer first, since protobuf doesn't deal - // with the DataInput interface directly. 
- - // Read the size delimiter that writeDelimitedTo writes - int size = ProtoUtil.readRawVarint32(dataIn); - if (size < 0) { - throw new IOException("Invalid size: " + size); - } - - byte[] data = new byte[size]; - dataIn.readFully(data); - Method parseMethod = getStaticProtobufMethod(protoClass, - "parseFrom", byte[].class); - return (Message)parseMethod.invoke(null, data); - } - } catch (InvocationTargetException e) { - - if (e.getCause() instanceof IOException) { - throw (IOException)e.getCause(); - } else { - throw new IOException(e.getCause()); - } - } catch (IllegalAccessException iae) { - throw new AssertionError("Could not access parse method in " + - protoClass); - } - } - - static Method getStaticProtobufMethod(Class declaredClass, String method, - Class ... args) { - - try { - return declaredClass.getMethod(method, args); - } catch (Exception e) { - // This is a bug in Hadoop - protobufs should all have this static method - throw new AssertionError("Protocol buffer class " + declaredClass + - " does not have an accessible parseFrom(InputStream) method!"); - } - } - - @SuppressWarnings("unchecked") - private static Class getClassByName(Configuration conf, String className) - throws ClassNotFoundException { - if(conf != null) { - return conf.getClassByName(className); - } - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - if(cl == null) { - cl = HbaseObjectWritable.class.getClassLoader(); - } - return Class.forName(className, true, cl); - } - - private static void addToMap(final Class clazz, final int code) { - CLASS_TO_CODE.put(clazz, code); - CODE_TO_CLASS.put(code, clazz); - } - - public void setConf(Configuration conf) { - this.conf = conf; - } - - public Configuration getConf() { - return this.conf; - } -} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java (working copy) @@ -1,73 +0,0 @@ -/** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.io; - -import java.util.*; - -import org.apache.hadoop.classification.InterfaceAudience; - -/** - * A Static Interface. - * Instead of having this code in the the HbaseMapWritable code, where it - * blocks the possibility of altering the variables and changing their types, - * it is put here in this static interface where the static final Maps are - * loaded one time. Only byte[] and Cell are supported at this time. 
- */ -@InterfaceAudience.Private -public interface CodeToClassAndBack { - /** - * Static map that contains mapping from code to class - */ - public static final Map> CODE_TO_CLASS = - new HashMap>(); - - /** - * Static map that contains mapping from class to code - */ - public static final Map, Byte> CLASS_TO_CODE = - new HashMap, Byte>(); - - /** - * Class list for supported classes - */ - public Class[] classList = {byte[].class}; - - /** - * The static loader that is used instead of the static constructor in - * HbaseMapWritable. - */ - public InternalStaticLoader sl = - new InternalStaticLoader(classList, CODE_TO_CLASS, CLASS_TO_CODE); - - /** - * Class that loads the static maps with their values. - */ - public class InternalStaticLoader{ - InternalStaticLoader(Class[] classList, - Map> CODE_TO_CLASS, Map, Byte> CLASS_TO_CODE){ - byte code = 1; - for(int i=0; i declaredClass = Object.class; - if (value != null) { - declaredClass = value.getClass(); - } - return toParameter(declaredClass, value); - } - - /** - * Serialize a Java Object into a Parameter. The Java Object should be a - * Writable or protocol buffer Message - * - * @param declaredClass the declared class of the parameter - * @param value the Writable/Message object to be serialized - * @return the converted protocol buffer Parameter - * @throws IOException if failed to serialize the object - */ - public static NameBytesPair toParameter( - final Class declaredClass, final Object value) throws IOException { - NameBytesPair.Builder builder = NameBytesPair.newBuilder(); - builder.setName(declaredClass.getName()); - if (value != null) { - ByteArrayOutputStream baos = null; - try { - baos = new ByteArrayOutputStream(); - DataOutput out = new DataOutputStream(baos); - Class clz = declaredClass; - if (HbaseObjectWritable.getClassCode(declaredClass) == null) { - clz = value.getClass(); - } - HbaseObjectWritable.writeObject(out, value, clz, null); - builder.setValue( - ByteString.copyFrom(baos.toByteArray())); - } finally { - if (baos != null) { - baos.close(); - } - } - } - return builder.build(); - } - // Start helpers for Client /** @@ -1185,8 +1105,7 @@ if (actions.size() > rowMutations) { MultiRequest request = RequestConverter.buildMultiRequest(regionName, actions); - ClientProtos.MultiResponse - proto = client.multi(null, request); + ClientProtos.MultiResponse proto = client.multi(null, request); List results = ResponseConverter.getResults(proto); for (int i = 0, n = results.size(); i < n; i++) { int originalIndex = actions.get(i).getOriginalIndex(); Index: hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java (revision 1426728) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java (working copy) @@ -91,7 +91,8 @@ if (result.hasException()) { results.add(ProtobufUtil.toException(result.getException())); } else if (result.hasValue()) { - Object value = ProtobufUtil.toObject(result.getValue()); + ClientProtos.Result r = result.getValue(); + Object value = ProtobufUtil.toResult(r); if (value instanceof ClientProtos.Result) { results.add(ProtobufUtil.toResult((ClientProtos.Result)value)); } else {