diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index f8c3ec6..89c82ad 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -50,7 +51,6 @@ import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.exceptions.FailedLogCloseException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.HBaseSnapshotException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.NotServingRegionException;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
index a44dc8a..144f6d8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 public class WrongRowIOException extends HBaseIOException {
   private static final long serialVersionUID = -5849522209440123059L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
index 8973233..6eb29c1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * Subclass if exception is not meant to be retried: e.g.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java
deleted file mode 100644
index 37c2b68..0000000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseIOException.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.exceptions;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import java.io.IOException;
-
-/**
- * All hbase specific IOExceptions should be subclasses of HBaseIOException
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class HBaseIOException extends IOException {
-
-  private static final long serialVersionUID = 1L;
-
-  public HBaseIOException() {
-    super();
-  }
-
-  public HBaseIOException(String message) {
-    super(message);
-  }
-
-  public HBaseIOException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public HBaseIOException(Throwable cause) {
-    super(cause);
-  }}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java
index f25ed11..60cbf7c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/HBaseSnapshotException.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 
 /**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java
index 31bd343..c208ed5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PleaseHoldException.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * This exception is thrown by the master when a region server was shut down and
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java
index c483f16..4d5912c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionException.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  * Thrown when something happens related to region handling.
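Note on the hunks above: HBaseIOException moves out of the client-only org.apache.hadoop.hbase.exceptions package into org.apache.hadoop.hbase in hbase-common, so the exception classes that extend it keep their names and bodies and only swap the import, as the hunks show. A minimal sketch of what a subclass looks like after the move (IllustrativeException is a made-up name used only for illustration, not part of this patch):

    package org.apache.hadoop.hbase.exceptions;

    // HBaseIOException now lives in hbase-common under org.apache.hadoop.hbase.
    import org.apache.hadoop.hbase.HBaseIOException;

    public class IllustrativeException extends HBaseIOException {
      private static final long serialVersionUID = 1L;

      public IllustrativeException(String message) {
        super(message);
      }
    }
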
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java
index b500f10..b0c39e0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/TableInfoMissingException.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.exceptions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HBaseIOException;
 
 /**
  *
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 3b826b0..123b736 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -555,10 +555,10 @@ public final class ProtobufUtil {
    * @param cellScanner
    * @param proto the protocol buffer Mutate to convert
    * @return the converted client Append
-   * @throws DoNotRetryIOException
+   * @throws IOException
    */
   public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
-      throws DoNotRetryIOException {
+      throws IOException {
     MutationType type = proto.getMutateType();
     assert type == MutationType.APPEND : type.name();
     byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
index 430da21..5c14fa3 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -36,7 +37,7 @@ import org.junit.experimental.categories.Category;
 @Category(SmallTests.class)
 public class TestPayloadCarryingRpcController {
   @Test
-  public void testListOfCellScannerables() {
+  public void testListOfCellScannerables() throws IOException {
     List cells = new ArrayList();
     final int count = 10;
     for (int i = 0; i < count; i++) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
index 596710f..4f98d7e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellScanner.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -57,6 +59,7 @@ public interface CellScanner {
   /**
    * Advance the scanner 1 cell.
    * @return true if the next cell is found and {@link #current()} will return a valid Cell
+   * @throws IOException
    */
-  boolean advance();
-}
+  boolean advance() throws IOException;
+}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index d65aaab..c677a0d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Iterator;
 import java.util.List;
@@ -142,7 +143,7 @@ public final class CellUtil {
       }
 
       @Override
-      public boolean advance() {
+      public boolean advance() throws IOException {
         if (this.cellScanner == null) {
           if (!this.iterator.hasNext()) return false;
           this.cellScanner = this.iterator.next().cellScanner();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
new file mode 100644
index 0000000..4023896
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.io.IOException;
+
+/**
+ * All hbase specific IOExceptions should be subclasses of HBaseIOException
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class HBaseIOException extends IOException {
+
+  private static final long serialVersionUID = 1L;
+
+  public HBaseIOException() {
+    super();
+  }
+
+  public HBaseIOException(String message) {
+    super(message);
+  }
+
+  public HBaseIOException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public HBaseIOException(Throwable cause) {
+    super(cause);
+  }}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
index 0edb781..3b95c53 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
@@ -32,17 +32,13 @@ public abstract class BaseDecoder implements Codec.Decoder {
   }
 
   @Override
-  public boolean advance() {
+  public boolean advance() throws IOException {
     if (!this.hasNext) return this.hasNext;
-    try {
-      if (this.in.available() <= 0) {
-        this.hasNext = false;
-        return this.hasNext;
-      }
-      this.current = parseCell();
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    if (this.in.available() <= 0) {
+      this.hasNext = false;
+      return this.hasNext;
     }
+    this.current = parseCell();
     return this.hasNext;
   }
 
@@ -56,4 +52,4 @@ public abstract class BaseDecoder implements Codec.Decoder {
   public Cell current() {
     return this.current;
   }
-}
+}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
index 941fb0e..a2430dc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
@@ -42,10 +42,6 @@ public abstract class BaseEncoder implements Codec.Encoder {
   public void flush() throws IOException {
     if (this.flushed) return;
     this.flushed = true;
-    try {
-      this.out.flush();
-    } catch (IOException e) {
-      throw new CodecException(e);
-    }
+    this.out.flush();
   }
 }
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
index 2acf9de..7fa8695 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
@@ -40,22 +40,18 @@ public class CellCodec implements Codec {
     @Override
     public void write(Cell cell) throws IOException {
       checkFlushed();
-      try {
-        // Row
-        write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-        // Column family
-        write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-        // Qualifier
-        write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
-        // Version
-        this.out.write(Bytes.toBytes(cell.getTimestamp()));
-        // Type
-        this.out.write(cell.getTypeByte());
-        // Value
-        write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-      } catch (IOException e) {
-        throw new CodecException(e);
-      }
+      // Row
+      write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+      // Column family
+      write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
+      // Qualifier
+      write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+      // Version
+      this.out.write(Bytes.toBytes(cell.getTimestamp()));
+      // Type
+      this.out.write(cell.getTypeByte());
+      // Value
+      write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
     }
 
     /**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
index 15919cd..8124d9e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CodecException.java
@@ -17,12 +17,16 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import java.io.IOException;
 
-public class CodecException extends IOException {
-  private static final long serialVersionUID = -2850095011686914405L;
+import org.apache.hadoop.hbase.HBaseIOException;
 
+/**
+ * Thrown when problems in the codec whether setup or context.
+ */
+@SuppressWarnings("serial")
+public class CodecException extends HBaseIOException {
   public CodecException() {
+    super();
   }
 
   public CodecException(String message) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index 8e66fef..0cf2dae 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -53,11 +53,7 @@ public class KeyValueCodec implements Codec {
       checkFlushed();
       // This is crass and will not work when KV changes. Also if passed a non-kv Cell, it will
      // make expensive copy.
-      try {
-        KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
-      } catch (IOException e) {
-        throw new CodecException(e);
-      }
+      KeyValue.oswrite((KeyValue)KeyValueUtil.ensureKeyValue(cell), this.out);
     }
   }
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
index aad32d8..5f319fc 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
@@ -61,12 +62,16 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{
 
   @Override
   public void individualSearcherAssertions(CellSearcher searcher) {
-    assertRowOffsetsCorrect();
+    assertRowOffsetsCorrect();
 
     searcher.resetToBeforeFirstEntry();
 
     //test first cell
-    searcher.advance();
+    try {
+      searcher.advance();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     Cell first = searcher.current();
     Assert.assertTrue(CellComparator.equals(d.get(0), first));
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
index 7fbde65..6c3750a 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.codec.prefixtree.row.data;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
@@ -66,7 +67,11 @@ public class TestRowDataSimple extends BaseTestRowData {
     searcher.resetToBeforeFirstEntry();
 
     // test first cell
-    searcher.advance();
+    try {
+      searcher.advance();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     Cell first = searcher.current();
     Assert.assertTrue(CellComparator.equals(d.get(0), first));
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
index fcf0515..d7183d9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.codec.BaseDecoder;
 import org.apache.hadoop.hbase.codec.BaseEncoder;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.CodecException;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 
 import com.google.protobuf.ByteString;
@@ -62,11 +61,7 @@ public class MessageCodec implements Codec {
       builder.setValue(ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(),
         cell.getValueLength()));
       HBaseProtos.Cell pbcell = builder.build();
-      try {
-        pbcell.writeDelimitedTo(this.out);
-      } catch (IOException e) {
-        throw new CodecException(e);
-      }
+      pbcell.writeDelimitedTo(this.out);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 2beac77..931c716 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.Chore;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -69,7 +70,6 @@ import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.NotAllMetaRegionsOnlineException;
 import org.apache.hadoop.hbase.exceptions.PleaseHoldException;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 6d45aff..54f7853 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.ConcurrentModificationException;
@@ -42,7 +43,7 @@ public class TestPutDeleteEtcCellIteration {
   private static final int COUNT = 10;
 
   @Test
-  public void testPutIteration() {
+  public void testPutIteration() throws IOException {
     Put p = new Put(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -58,7 +59,7 @@
   }
 
   @Test (expected = ConcurrentModificationException.class)
-  public void testPutConcurrentModificationOnIteration() {
+  public void testPutConcurrentModificationOnIteration() throws IOException {
     Put p = new Put(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -77,7 +78,7 @@
   }
 
   @Test
-  public void testDeleteIteration() {
+  public void testDeleteIteration() throws IOException {
     Delete d = new Delete(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -93,7 +94,7 @@
   }
 
   @Test
-  public void testAppendIteration() {
+  public void testAppendIteration() throws IOException {
     Append a = new Append(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -111,7 +112,7 @@
   }
 
   @Test
-  public void testIncrementIteration() {
+  public void testIncrementIteration() throws IOException {
     Increment increment = new Increment(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
@@ -131,7 +132,7 @@
   }
 
   @Test
-  public void testResultIteration() {
+  public void testResultIteration() throws IOException {
     Cell [] cells = new Cell[COUNT];
     for(int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
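Note on the CellScanner-related hunks above: advance() now declares IOException, so callers either propagate it (as TestPayloadCarryingRpcController and TestPutDeleteEtcCellIteration do by adding throws IOException) or wrap the call, as the prefix-tree test data classes do. A rough caller sketch under those assumptions (CellScannerExample and countCells are made-up names for illustration, not part of this patch):

    import java.io.IOException;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellScanner;

    public final class CellScannerExample {
      private CellScannerExample() {}

      // Counts the cells a scanner yields; advance() can now surface an IOException
      // from the underlying stream instead of a wrapped RuntimeException.
      public static int countCells(CellScanner scanner) throws IOException {
        int count = 0;
        while (scanner.advance()) {
          Cell cell = scanner.current();
          if (cell != null) {
            count++;
          }
        }
        return count;
      }
    }
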
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 1e8a798..c4cd185 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -37,6 +37,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.exceptions.HBaseIOException;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.UnknownRegionException;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
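Note on the codec hunks earlier in the patch: CodecException now extends HBaseIOException (itself an IOException), and the encoders and decoders stop wrapping stream errors, so codec callers deal in plain IOException. A rough sketch of the calling pattern under those assumptions (EncoderExample and writeAll are made-up names for illustration, not part of this patch):

    import java.io.IOException;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.codec.Codec;

    public final class EncoderExample {
      private EncoderExample() {}

      // Writes a batch of cells; IOExceptions from the underlying stream now
      // propagate directly rather than arriving wrapped in a CodecException.
      public static void writeAll(Codec.Encoder encoder, Iterable<Cell> cells) throws IOException {
        for (Cell cell : cells) {
          encoder.write(cell);
        }
        encoder.flush();
      }
    }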