Index: src/main/java/org/apache/hadoop/hbase/client/ConnectionClosedException.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/ConnectionClosedException.java	(revision 0)
+++ src/main/java/org/apache/hadoop/hbase/client/ConnectionClosedException.java	(revision 0)
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+
+/**
+ * Thrown when HConnection has been closed.
+ */
+public class ConnectionClosedException extends IOException {
+  private static final long serialVersionUID = 8792360655678089586L;
+
+  public ConnectionClosedException() {
+    super();
+  }
+
+  public ConnectionClosedException(String s) {
+    super(s);
+  }
+}

Property changes on: src\main\java\org\apache\hadoop\hbase\client\ConnectionClosedException.java
___________________________________________________________________
Added: svn:needs-lock
   + *

Index: src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java	(revision 1229032)
+++ src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java	(working copy)
@@ -29,8 +29,8 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.NoSuchElementException;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeSet;
@@ -787,7 +787,8 @@
   private HRegionLocation locateRegion(final byte [] tableName,
     final byte [] row, boolean useCache)
   throws IOException {
-    if (this.closed) throw new IOException(toString() + " closed");
+    if (this.closed)
+      throw new ConnectionClosedException(toString() + " closed");
     if (tableName == null || tableName.length == 0) {
       throw new IllegalArgumentException(
           "table name cannot be null or zero length");
Index: src/main/java/org/apache/hadoop/hbase/client/HTable.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/HTable.java	(revision 1229032)
+++ src/main/java/org/apache/hadoop/hbase/client/HTable.java	(working copy)
@@ -50,7 +50,6 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.UnknownScannerException;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable;
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -857,6 +856,10 @@
   public void flushCommits() throws IOException {
     try {
       connection.processBatchOfPuts(writeBuffer, tableName, pool);
+    } catch (ConnectionClosedException cce) {
+      // Replace the closed connection so that later flushes can succeed,
+      this.connection = HConnectionManager.getConnection(this.getConfiguration());
+      throw cce;
     } finally {
       if (clearBufferOnFail) {
         writeBuffer.clear();
Index: src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java	(revision 1229032)
+++ src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java	(working copy)
@@ -54,6 +54,7 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
@@ -63,7 +64,6 @@
 import org.apache.hadoop.hbase.filter.RowFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -4032,6 +4032,41 @@
     queue.put(new Object());
   }
 
+  /**
+   * Test HConnection can be re-created in HTable after this connection has been
+   * closed.
+   * @throws IOException
+   */
+  @Test
+  public void testHTableConnectionRecovery() throws IOException {
+    final byte[] COLUMN_FAMILY = Bytes.toBytes("columnfam");
+    final byte[] COLUMN = Bytes.toBytes("col");
+    HTable table = TEST_UTIL.createTable(
+        Bytes.toBytes("testConnectionRecover"), new byte[][] { COLUMN_FAMILY });
+    Put put01 = new Put(Bytes.toBytes("testrow1"));
+    put01.add(COLUMN_FAMILY, COLUMN, Bytes.toBytes("testValue"));
+    table.put(put01);
+    // At this time, abort the connection.
+    HConnection conn1 = table.getConnection();
+    conn1.abort("Test Connection Abort", new IOException("TestIOE"));
+    // This put will fail, but trigger to create a new connection.
+    boolean putFailed = false;
+    try {
+      Put put02 = new Put(Bytes.toBytes("testrow1"));
+      put02.add(COLUMN_FAMILY, COLUMN, Bytes.toBytes("testValue"));
+      table.put(put02);
+    } catch (IOException ioe) {
+      putFailed = true;
+      LOG.info(
+          "It's an expected IOE, for the original connection has been closed.",
+          ioe);
+    }
+    assertTrue("put on a closed connection should fail", putFailed);
+    // Get the connection again, and check whether it has been changed.
+    HConnection conn2 = table.getConnection();
+    assertTrue(conn1 != conn2);
+  }
 }