Index: src/test/org/apache/hcatalog/pig/TestPigStorageDriver.java
===================================================================
--- src/test/org/apache/hcatalog/pig/TestPigStorageDriver.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/pig/TestPigStorageDriver.java	(working copy)
@@ -38,6 +38,7 @@
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
@@ -78,7 +79,7 @@
     super.tearDown();
   }
 
-  public void testPigStorageDriver() throws IOException{
+  public void testPigStorageDriver() throws IOException, CommandNeedRetryException{
 
     String fsLoc = howlConf.get("fs.default.name");
@@ -133,7 +134,7 @@
     howlDriver.run("drop table junit_pigstorage");
   }
 
-  public void testDelim() throws MetaException, TException, UnknownTableException, NoSuchObjectException, InvalidOperationException, IOException{
+  public void testDelim() throws MetaException, TException, UnknownTableException, NoSuchObjectException, InvalidOperationException, IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_pigstorage_delim");
Index: src/test/org/apache/hcatalog/pig/TestHCatLoader.java
===================================================================
--- src/test/org/apache/hcatalog/pig/TestHCatLoader.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/pig/TestHCatLoader.java	(working copy)
@@ -30,6 +30,7 @@
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.MiniCluster;
@@ -61,10 +62,10 @@
 
   private static Map> basicInputData;
 
-  private void dropTable(String tablename) throws IOException{
+  private void dropTable(String tablename) throws IOException, CommandNeedRetryException{
     driver.run("drop table "+tablename);
   }
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException{
+  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException{
     String createTable;
     createTable = "create table "+tablename+"("+schema+") ";
     if ((partitionedBy != null)&&(!partitionedBy.trim().isEmpty())){
@@ -78,7 +79,7 @@
     }
   }
 
-  private void createTable(String tablename, String schema) throws IOException{
+  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException{
     createTable(tablename,schema,null);
   }
 
@@ -154,7 +155,7 @@
     server.executeBatch();
   }
 
-  private void cleanup() throws IOException {
+  private void cleanup() throws IOException, CommandNeedRetryException {
     MiniCluster.deleteFile(cluster, basicFile);
     MiniCluster.deleteFile(cluster, complexFile);
     dropTable(BASIC_TABLE);
@@ -271,7 +272,7 @@
 
   }
 
-  public void testReadPartitionedBasic() throws IOException {
+  public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
     PigServer server = new PigServer(ExecType.LOCAL, props);
 
     driver.run("select * from "+PARTITIONED_TABLE);
Index: src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java
===================================================================
--- src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java	(working copy)
@@ -27,6 +27,7 @@
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.MiniCluster;
@@ -48,10 +49,10 @@
 
   private static Map> basicInputData;
 
-  private void dropTable(String tablename) throws IOException{
+  private void dropTable(String tablename) throws IOException, CommandNeedRetryException{
     driver.run("drop table "+tablename);
   }
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException{
+  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException{
     String createTable;
     createTable = "create table "+tablename+"("+schema+") ";
     if ((partitionedBy != null)&&(!partitionedBy.trim().isEmpty())){
@@ -65,7 +66,7 @@
     }
   }
 
-  private void createTable(String tablename, String schema) throws IOException{
+  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException{
     createTable(tablename,schema,null);
   }
 
@@ -184,7 +185,7 @@
     MiniCluster.createInputFile(cluster, basicFile, input);
   }
 
-  private void cleanup() throws IOException {
+  private void cleanup() throws IOException, CommandNeedRetryException {
     MiniCluster.deleteFile(cluster, basicFile);
     dropTable(BASIC_TABLE);
     dropTable(PARTITIONED_TABLE);
Index: src/test/org/apache/hcatalog/pig/TestHCatStorer.java
===================================================================
--- src/test/org/apache/hcatalog/pig/TestHCatStorer.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/pig/TestHCatStorer.java	(working copy)
@@ -27,6 +27,7 @@
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.MiniCluster;
@@ -101,7 +102,7 @@
 //
 //  }
 
-  public void testPartColsInData() throws IOException{
+  public void testPartColsInData() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE " +
@@ -140,7 +141,7 @@
     MiniCluster.deleteFile(cluster, fileName);
   }
 
-  public void testMultiPartColsInData() throws IOException{
+  public void testMultiPartColsInData() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
@@ -187,7 +188,7 @@
     driver.run("drop table employee");
   }
 
-  public void testStoreInPartiitonedTbl() throws IOException{
+  public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as RCFILE " +
@@ -226,7 +227,7 @@
     MiniCluster.deleteFile(cluster, fileName);
   }
 
-  public void testNoAlias() throws IOException{
+  public void testNoAlias() throws IOException, CommandNeedRetryException{
     driver.run("drop table junit_parted");
     String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as RCFILE " +
         "tblproperties('"+HCatConstants.HCAT_ISD_CLASS+"'='"+RCFileInputDriver.class.getName()+"'," +
@@ -272,7 +273,7 @@
     assertTrue(errCaught);
   }
 
-  public void testStoreMultiTables() throws IOException{
+  public void testStoreMultiTables() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b string) stored as RCFILE " +
@@ -333,7 +334,7 @@
 
   }
 
-  public void testStoreWithNoSchema() throws IOException{
+  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b string) stored as RCFILE " +
@@ -375,7 +376,7 @@
 
   }
 
-  public void testStoreWithNoCtorArgs() throws IOException{
+  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b string) stored as RCFILE " +
@@ -417,7 +418,7 @@
 
   }
 
-  public void testEmptyStore() throws IOException{
+  public void testEmptyStore() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b string) stored as RCFILE " +
@@ -456,7 +457,7 @@
 
   }
 
-  public void testBagNStruct() throws IOException{
+  public void testBagNStruct() throws IOException, CommandNeedRetryException{
    driver.run("drop table junit_unparted");
    String createTable = "create table junit_unparted(b string,a struct, arr_of_struct array, " +
        "arr_of_struct2 array>, arr_of_struct3 array>) stored as RCFILE " +
@@ -494,7 +495,7 @@
 
   }
 
-  public void testStoreFuncAllSimpleTypes() throws IOException{
+  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string) stored as RCFILE " +
@@ -541,7 +542,7 @@
 
 
-  public void testStoreFuncSimple() throws IOException{
+  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException{
 
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(a int, b string) stored as RCFILE " +
Index: src/test/org/apache/hcatalog/cli/TestUseDatabase.java
===================================================================
--- src/test/org/apache/hcatalog/cli/TestUseDatabase.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/cli/TestUseDatabase.java	(working copy)
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -51,7 +52,7 @@
   private final String dbName = "testUseDatabase_db";
   private final String tblName = "testUseDatabase_tbl";
 
-  public void testAlterTablePass() throws IOException{
+  public void testAlterTablePass() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("create database " + dbName);
     howlDriver.run("use " + dbName);
Index: src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
===================================================================
--- src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java	(revision 1097565)
+++ src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java	(working copy)
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
@@ -74,7 +75,7 @@
   String query;
   private final String tblName = "junit_sem_analysis";
 
-  public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException {
+  public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
 
     hiveDriver.run("drop table junit_sem_analysis");
     hiveDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
@@ -102,7 +103,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testDatabaseOperations() throws MetaException {
+  public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
 
     List dbs = msc.getAllDatabases();
     String testDb1 = "testdatabaseoperatons1";
@@ -129,7 +130,7 @@
     assertFalse(msc.getAllDatabases().contains(testDb2));
   }
 
-  public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException{
+  public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
 
     howlDriver.run("drop table "+tblName);
     howlDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
@@ -159,7 +160,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAlterTblTouch(){
+  public void testAlterTblTouch() throws CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -174,7 +175,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testChangeColumns(){
+  public void testChangeColumns() throws CommandNeedRetryException{
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = howlDriver.run("alter table junit_sem_analysis change a a1 int");
@@ -191,7 +192,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException{
+  public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
@@ -211,7 +212,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAlterTblClusteredBy(){
+  public void testAlterTblClusteredBy() throws CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -221,7 +222,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException{
+  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -248,7 +249,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAddPartFail(){
+  public void testAddPartFail() throws CommandNeedRetryException{
 
     hiveDriver.run("drop table junit_sem_analysis");
     hiveDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -259,7 +260,7 @@
     hiveDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAddPartPass() throws IOException{
+  public void testAddPartPass() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     howlDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -269,7 +270,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testCTAS(){
+  public void testCTAS() throws CommandNeedRetryException{
    howlDriver.run("drop table junit_sem_analysis");
    query = "create table junit_sem_analysis (a int) as select * from tbl2";
    CommandProcessorResponse response = howlDriver.run(query);
@@ -278,7 +279,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testStoredAs(){
+  public void testStoredAs() throws CommandNeedRetryException{
    howlDriver.run("drop table junit_sem_analysis");
    query = "create table junit_sem_analysis (a int)";
    CommandProcessorResponse response = howlDriver.run(query);
@@ -287,7 +288,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException{
+  public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as " +
@@ -305,7 +306,7 @@
     howlDriver.run("drop table junit_sem_analysis");
   }
 
-  public void testInvalidateNonStringPartition() throws IOException{
+  public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     query =  "create table junit_sem_analysis (a int) partitioned by (b int) stored as RCFILE";
@@ -317,7 +318,7 @@
 
   }
 
-  public void testInvalidateSeqFileStoredAs() throws IOException{
+  public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as SEQUENCEFILE";
@@ -329,7 +330,7 @@
 
   }
 
-  public void testInvalidateTextFileStoredAs() throws IOException{
+  public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE";
@@ -341,7 +342,7 @@
 
   }
 
-  public void testInvalidateClusteredBy() throws IOException{
+  public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException{
 
     howlDriver.run("drop table junit_sem_analysis");
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
@@ -352,7 +353,7 @@
         response.getErrorMessage());
   }
 
-  public void testCTLFail() throws IOException{
+  public void testCTLFail() throws IOException, CommandNeedRetryException{
 
     hiveDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
@@ -364,7 +365,7 @@
     assertEquals("FAILED: Error in semantic analysis: Operation not supported. CREATE TABLE LIKE is not supported.", response.getErrorMessage());
   }
 
-  public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException{
+  public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
 
     try{
       howlDriver.run("drop table junit_sem_analysis");
Index: src/java/org/apache/hcatalog/cli/HCatCli.java
===================================================================
--- src/java/org/apache/hcatalog/cli/HCatCli.java	(revision 1097565)
+++ src/java/org/apache/hcatalog/cli/HCatCli.java	(working copy)
@@ -40,6 +40,7 @@
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -219,7 +220,7 @@
       return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
     }
 
-    Driver driver = new HCatDriver();
+    HCatDriver driver = new HCatDriver();
 
     int ret = driver.run(cmd).getResponseCode();
 
@@ -240,7 +241,11 @@
       ss.err.println("Failed with exception " + e.getClass().getName() + ":"
          + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
       ret = 1;
-    }
+    } catch (CommandNeedRetryException e) {
+      ss.err.println("Failed with exception " + e.getClass().getName() + ":"
+         + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      ret = 1;
+    }
 
     int cret = driver.close();
     if (ret == 0) {
Index: src/java/org/apache/hcatalog/cli/HCatDriver.java
===================================================================
--- src/java/org/apache/hcatalog/cli/HCatDriver.java	(revision 1097565)
+++ src/java/org/apache/hcatalog/cli/HCatDriver.java	(working copy)
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -36,7 +37,12 @@
 
   @Override
   public CommandProcessorResponse run(String command) {
 
-    int ret = super.run(command).getResponseCode();
+    int ret = -1;
+    try {
+      ret = super.run(command).getResponseCode();
+    } catch (CommandNeedRetryException e) {
+      return new CommandProcessorResponse(ret, e.toString(), "");
+    }
 
    SessionState ss = SessionState.get();