Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (working copy)
@@ -38,6 +38,7 @@
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -50,7 +51,6 @@
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.exceptions.FailedLogCloseException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.HBaseSnapshotException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.NotServingRegionException;
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java (working copy)
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 public class WrongRowIOException extends HBaseIOException {
   private static final long serialVersionUID = -5849522209440123059L;
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java (working copy)
@@ -20,6 +20,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * Subclass if exception is not meant to be retried: e.g.
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java (working copy)
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.exceptions;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import java.io.IOException;
-
-/**
- * All hbase specific IOExceptions should be subclasses of HBaseIOException
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class HBaseIOException extends IOException {
-
-  private static final long serialVersionUID = 1L;
-
-  public HBaseIOException() {
-    super();
-  }
-
-  public HBaseIOException(String message) {
-    super(message);
-  }
-
-  public HBaseIOException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public HBaseIOException(Throwable cause) {
-    super(cause);
-  }}
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java (working copy)
@@ -19,6 +19,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java (working copy)
@@ -20,6 +20,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * This exception is thrown by the master when a region server was shut down and
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java (working copy)
@@ -20,6 +20,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * Thrown when something happens related to region handling.
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java (working copy)
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  *
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java (working copy)
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 public class StoppedRpcClientException extends HBaseIOException {
   public StoppedRpcClientException() {
@@ -27,4 +27,4 @@
   public StoppedRpcClientException(String msg) {
     super(msg);
   }
-}
\ No newline at end of file
+}
Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
===================================================================
--- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (revision 1478652)
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (working copy)
@@ -559,10 +559,10 @@
    * @param cellScanner
    * @param proto the protocol buffer Mutate to convert
    * @return the converted client Append
-   * @throws DoNotRetryIOException
+   * @throws IOException
    */
   public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
-  throws DoNotRetryIOException {
+  throws IOException {
     MutationType type = proto.getMutateType();
     assert type == MutationType.APPEND : type.name();
     byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
Index: hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
===================================================================
--- hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java (revision 1478652)
+++ hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java (working copy)
@@ -21,6 +21,7 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -36,7 +37,7 @@
 @Category(SmallTests.class)
 public class TestPayloadCarryingRpcController {
   @Test
-  public void testListOfCellScannerables() {
+  public void testListOfCellScannerables() throws IOException {
     List cells = new ArrayList();
     final int count = 10;
     for (int i = 0; i < count; i++) {
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java (working copy)
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -57,6 +59,7 @@
   /**
    * Advance the scanner 1 cell.
    * @return true if the next cell is found and {@link #current()} will return a valid Cell
+   * @throws IOException
   */
-  boolean advance();
-}
+  boolean advance() throws IOException;
+}
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java (working copy)
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Iterator;
 import java.util.List;
@@ -142,7 +143,7 @@
     }
 
     @Override
-    public boolean advance() {
+    public boolean advance() throws IOException {
      if (this.cellScanner == null) {
        if (!this.iterator.hasNext()) return false;
        this.cellScanner = this.iterator.next().cellScanner();
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java (revision 0)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java (working copy)
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.io.IOException;
+
+/**
+ * All hbase specific IOExceptions should be subclasses of HBaseIOException
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class HBaseIOException extends IOException {
+
+  private static final long serialVersionUID = 1L;
+
+  public HBaseIOException() {
+    super();
+  }
+
+  public HBaseIOException(String message) {
+    super(message);
+  }
+
+  public HBaseIOException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public HBaseIOException(Throwable cause) {
+    super(cause);
+  }}
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java (working copy)
@@ -32,17 +32,13 @@
   }
 
   @Override
-  public boolean advance() {
+  public boolean advance() throws IOException {
     if (!this.hasNext) return this.hasNext;
-    try {
-      if (this.in.available() <= 0) {
-        this.hasNext = false;
-        return this.hasNext;
-      }
-      this.current = parseCell();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    if (this.in.available() <= 0) {
+      this.hasNext = false;
+      return this.hasNext;
     }
+    this.current = parseCell();
     return this.hasNext;
   }
 
@@ -56,4 +52,4 @@
   public Cell current() {
     return this.current;
   }
-}
+}
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java (working copy)
@@ -42,10 +42,6 @@
   public void flush() throws IOException {
     if (this.flushed) return;
     this.flushed = true;
-    try {
-      this.out.flush();
-    } catch (IOException e) {
-      throw new CodecException(e);
-    }
+    this.out.flush();
   }
 }
\ No newline at end of file
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java (working copy)
@@ -40,22 +40,18 @@
   @Override
   public void write(Cell cell) throws IOException {
     checkFlushed();
-    try {
-      // Row
-      write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-      // Column family
-      write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-      // Qualifier
-      write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
-      // Version
-      this.out.write(Bytes.toBytes(cell.getTimestamp()));
-      // Type
-      this.out.write(cell.getTypeByte());
-      // Value
-      write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-    } catch (IOException e) {
-      throw new CodecException(e);
-    }
+    // Row
+    write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+    // Column family
+    write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
+    // Qualifier
+    write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+    // Version
+    this.out.write(Bytes.toBytes(cell.getTimestamp()));
+    // Type
+    this.out.write(cell.getTypeByte());
+    // Value
+    write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
   }
 
   /**
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java (working copy)
@@ -17,12 +17,16 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import java.io.IOException;
 
-public class CodecException extends IOException {
-  private static final long serialVersionUID = -2850095011686914405L;
+import org.apache.hadoop.hbase.HBaseIOException;
 
+/**
+ * Thrown when problems in the codec whether setup or context.
+ */
+@SuppressWarnings("serial")
+public class CodecException extends HBaseIOException {
   public CodecException() {
+    super();
   }
 
   public CodecException(String message) {
Index: hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
===================================================================
--- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java (revision 1478652)
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java (working copy)
@@ -53,11 +53,7 @@
       checkFlushed();
       // This is crass and will not work when KV changes. Also if passed a non-kv Cell, it will
      // make expensive copy.
-      try {
-        KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
-      } catch (IOException e) {
-        throw new CodecException(e);
-      }
+      KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
     }
   }
 
Index: hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java (revision 1478652)
+++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java (working copy)
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
@@ -61,12 +62,16 @@
 
   @Override
   public void individualSearcherAssertions(CellSearcher searcher) {
-    assertRowOffsetsCorrect();
+    assertRowOffsetsCorrect();
 
     searcher.resetToBeforeFirstEntry();
 
     //test first cell
-    searcher.advance();
+    try {
+      searcher.advance();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     Cell first = searcher.current();
     Assert.assertTrue(CellComparator.equals(d.get(0), first));
 
Index: hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
===================================================================
--- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java (revision 1478652)
+++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java (working copy)
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
@@ -66,7 +67,11 @@
     searcher.resetToBeforeFirstEntry();
 
     // test first cell
-    searcher.advance();
+    try {
+      searcher.advance();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     Cell first = searcher.current();
     Assert.assertTrue(CellComparator.equals(d.get(0), first));
 
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java (revision 1478652)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java (working copy)
@@ -27,7 +27,6 @@
 import org.apache.hadoop.hbase.codec.BaseDecoder;
 import org.apache.hadoop.hbase.codec.BaseEncoder;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.CodecException;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 
 import com.google.protobuf.ByteString;
@@ -62,11 +61,7 @@
       builder.setValue(ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(),
        cell.getValueLength()));
       HBaseProtos.Cell pbcell = builder.build();
-      try {
-        pbcell.writeDelimitedTo(this.out);
-      } catch (IOException e) {
-        throw new CodecException(e);
-      }
+      pbcell.writeDelimitedTo(this.out);
     }
   }
 
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java (revision 1478652)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java (working copy)
@@ -48,6 +48,7 @@
 import org.apache.hadoop.hbase.Chore;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -66,7 +67,6 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.NotAllMetaRegionsOnlineException;
 import org.apache.hadoop.hbase.exceptions.PleaseHoldException;
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java (revision 1478652)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java (working copy)
@@ -21,6 +21,7 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.ConcurrentModificationException;
 
@@ -42,7 +43,7 @@
   private static final int COUNT = 10;
 
   @Test
-  public void testPutIteration() {
+  public void testPutIteration() throws IOException {
     Put p = new Put(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -58,7 +59,7 @@
   }
 
   @Test (expected = ConcurrentModificationException.class)
-  public void testPutConcurrentModificationOnIteration() {
+  public void testPutConcurrentModificationOnIteration() throws IOException {
     Put p = new Put(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -77,7 +78,7 @@
   }
 
   @Test
-  public void testDeleteIteration() {
+  public void testDeleteIteration() throws IOException {
     Delete d = new Delete(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -93,7 +94,7 @@
   }
 
   @Test
-  public void testAppendIteration() {
+  public void testAppendIteration() throws IOException {
     Append a = new Append(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -111,7 +112,7 @@
   }
 
   @Test
-  public void testIncrementIteration() {
+  public void testIncrementIteration() throws IOException {
     Increment increment = new Increment(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -131,7 +132,7 @@
   }
 
   @Test
-  public void testResultIteration() {
+  public void testResultIteration() throws IOException {
     Cell [] cells = new Cell[COUNT];
     for(int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java (revision 1478652)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java (working copy)
@@ -113,8 +113,12 @@
       // building.
       CellScanner cellScanner = pcrc.cellScanner();
       List list = new ArrayList();
-      while(cellScanner.advance()) {
-        list.add(cellScanner.current());
+      try {
+        while(cellScanner.advance()) {
+          list.add(cellScanner.current());
+        }
+      } catch (IOException e) {
+        throw new ServiceException(e);
       }
       cellScanner = CellUtil.createCellScanner(list);
       ((PayloadCarryingRpcController)controller).setCellScanner(cellScanner);
@@ -155,7 +159,7 @@
   @Test
   public void testCompressCellBlock() throws IOException, InterruptedException, SecurityException,
       NoSuchMethodException {
-    // Currently, you set 
+    // Currently, you set
     Configuration conf = HBaseConfiguration.create();
     conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
     TestRpcServer rpcServer = new TestRpcServer();
@@ -265,4 +269,4 @@
       rpcServer.stop();
     }
   }
-}
\ No newline at end of file
+}
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java (revision 1478652)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java (working copy)
@@ -34,6 +34,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -48,7 +49,6 @@
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.UnknownRegionException;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
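
Reviewer note (not part of the patch): besides relocating HBaseIOException from org.apache.hadoop.hbase.exceptions to org.apache.hadoop.hbase, the behavioral change here is that CellScanner.advance() and the codec write/flush paths now surface IOException as a checked exception instead of wrapping it in RuntimeException/CodecException, so callers must declare or handle it, as the TestIPC and prefix-tree test hunks above do. A minimal sketch of a caller under the new contract follows; the class and method names (CellScannerDrain, toList) are hypothetical and used only for illustration.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellScanner;

    // Hypothetical helper illustrating the new checked-exception contract.
    public class CellScannerDrain {
      // Collects all cells from the scanner; advance() may now throw
      // IOException, which is propagated to the caller instead of being
      // rethrown as an unchecked exception.
      public static List<Cell> toList(CellScanner scanner) throws IOException {
        List<Cell> cells = new ArrayList<Cell>();
        while (scanner.advance()) {
          cells.add(scanner.current());
        }
        return cells;
      }
    }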