diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index d8d900b..34f897b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -2025,8 +2025,8 @@ private int msck(Hive db, MsckDesc msckDesc) {
     List<String> repairOutput = new ArrayList<String>();
     try {
       HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
-      Table t = db.newTable(msckDesc.getTableName());
-      checker.checkMetastore(t.getDbName(), t.getTableName(), msckDesc.getPartSpecs(), result);
+      String[] names = Utilities.getDbTableName(msckDesc.getTableName());
+      checker.checkMetastore(names[0], names[1], msckDesc.getPartSpecs(), result);
       List<CheckResult.PartitionResult> partsNotInMs = result.getPartitionsNotInMs();
       if (msckDesc.isRepairPartitions() && !partsNotInMs.isEmpty()) {
         Table table = db.getTable(msckDesc.getTableName());
@@ -4366,10 +4366,10 @@ private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws HiveExce
 
     // find out database name and table name of target table
     String targetTableName = crtTbl.getTableName();
-    Table newTable = db.newTable(targetTableName);
+    String[] names = Utilities.getDbTableName(targetTableName);
 
-    tbl.setDbName(newTable.getDbName());
-    tbl.setTableName(newTable.getTableName());
+    tbl.setDbName(names[0]);
+    tbl.setTableName(names[1]);
 
     if (crtTbl.getLocation() != null) {
       tbl.setDataLocation(new Path(crtTbl.getLocation()));
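The pattern above repeats throughout the patch: instead of instantiating a throwaway Table via db.newTable(name) just to split a possibly qualified "db.table" string, each call site now asks Utilities.getDbTableName(name) for a two-element {database, table} array. The helper itself is not part of this diff; below is a minimal self-contained sketch of the splitting behavior the call sites appear to rely on. The class name, the explicit currentDb parameter, and the error handling are assumptions for illustration, not Hive's actual code.

    // Sketch only -- Utilities.getDbTableName is not shown in this patch.
    // Assumed contract: "db.table" splits on the dot; a bare table name
    // presumably falls back to the session's current database.
    public class DbTableNameSketch {
      public static String[] getDbTableName(String currentDb, String dbTable) {
        String[] parts = dbTable.split("\\.");
        if (parts.length == 2) {
          return parts;                               // already qualified
        } else if (parts.length == 1) {
          return new String[] { currentDb, dbTable }; // unqualified name
        }
        // more than one dot cannot be a valid db.table reference
        throw new IllegalArgumentException("Invalid table name: " + dbTable);
      }

      public static void main(String[] args) {
        String[] names = getDbTableName("default", "sales.orders");
        System.out.println(names[0] + "." + names[1]); // prints sales.orders
      }
    }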
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 4d35176..250756c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -396,13 +396,13 @@ public void createTable(String tableName, List<String> columns,
    */
   public void alterTable(String tblName, Table newTbl)
       throws InvalidOperationException, HiveException {
-    Table t = newTable(tblName);
+    String[] names = Utilities.getDbTableName(tblName);
     try {
       // Remove the DDL_TIME so it gets refreshed
       if (newTbl.getParameters() != null) {
         newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
       }
-      getMSC().alter_table(t.getDbName(), t.getTableName(), newTbl.getTTable());
+      getMSC().alter_table(names[0], names[1], newTbl.getTTable());
     } catch (MetaException e) {
       throw new HiveException("Unable to alter table.", e);
     } catch (TException e) {
@@ -445,8 +445,8 @@ public void alterIndex(String dbName, String baseTblName, String idxName, Index
    */
   public void alterPartition(String tblName, Partition newPart)
       throws InvalidOperationException, HiveException {
-    Table t = newTable(tblName);
-    alterPartition(t.getDbName(), t.getTableName(), newPart);
+    String[] names = Utilities.getDbTableName(tblName);
+    alterPartition(names[0], names[1], newPart);
   }
 
   /**
@@ -491,7 +491,7 @@ public void alterPartition(String dbName, String tblName, Partition newPart)
    */
   public void alterPartitions(String tblName, List<Partition> newParts)
       throws InvalidOperationException, HiveException {
-    Table t = newTable(tblName);
+    String[] names = Utilities.getDbTableName(tblName);
     List<org.apache.hadoop.hive.metastore.api.Partition> newTParts =
         new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
     try {
@@ -502,7 +502,7 @@ public void alterPartitions(String tblName, List<Partition> newParts)
         }
         newTParts.add(tmpPart.getTPartition());
       }
-      getMSC().alter_partitions(t.getDbName(), t.getTableName(), newTParts);
+      getMSC().alter_partitions(names[0], names[1], newTParts);
     } catch (MetaException e) {
       throw new HiveException("Unable to alter partition.", e);
     } catch (TException e) {
@@ -828,8 +828,8 @@ public Index getIndex(String qualifiedIndexName) throws HiveException {
   }
 
   public Index getIndex(String baseTableName, String indexName) throws HiveException {
-    Table t = newTable(baseTableName);
-    return this.getIndex(t.getDbName(), t.getTableName(), indexName);
+    String[] names = Utilities.getDbTableName(baseTableName);
+    return this.getIndex(names[0], names[1], indexName);
   }
 
   public Index getIndex(String dbName, String baseTableName,
@@ -861,8 +861,8 @@ public boolean dropIndex(String db_name, String tbl_name, String index_name, boo
    *           thrown if the drop fails
    */
   public void dropTable(String tableName) throws HiveException {
-    Table t = newTable(tableName);
-    dropTable(t.getDbName(), t.getTableName(), true, true);
+    String[] names = Utilities.getDbTableName(tableName);
+    dropTable(names[0], names[1], true, true);
   }
 
   /**
@@ -917,8 +917,7 @@ public HiveConf getConf() {
    *           table doesn't exist
    */
   public Table getTable(final String tableName) throws HiveException {
-    Table t = newTable(tableName);
-    return this.getTable(t.getDbName(), t.getTableName(), true);
+    return this.getTable(tableName, true);
   }
 
   /**
@@ -930,8 +929,8 @@ public Table getTable(final String tableName) throws HiveException {
    *           table doesn't exist
    */
   public Table getTable(final String tableName, boolean throwException) throws HiveException {
-    Table t = newTable(tableName);
-    return this.getTable(t.getDbName(), t.getTableName(), throwException);
+    String[] names = Utilities.getDbTableName(tableName);
+    return this.getTable(names[0], names[1], throwException);
   }
 
   /**
@@ -947,8 +946,8 @@ public Table getTable(final String tableName, boolean throwException) throws Hiv
    */
   public Table getTable(final String dbName, final String tableName) throws HiveException {
     if (tableName.contains(".")) {
-      Table t = newTable(tableName);
-      return this.getTable(t.getDbName(), t.getTableName(), true);
+      String[] names = Utilities.getDbTableName(tableName);
+      return this.getTable(names[0], names[1], true);
     } else {
       return this.getTable(dbName, tableName, true);
     }
   }
@@ -1502,7 +1501,7 @@ public void loadTable(Path loadPath, String tableName, boolean replace,
     }
   }
 
-  /** 
+  /**
   * Creates a partition.
   *
   * @param tbl
@@ -1687,9 +1686,9 @@ public Partition getPartition(Table tbl, Map<String, String> partSpec,
   }
 
   public boolean dropPartition(String tblName, List<String> part_vals, boolean deleteData)
-    throws HiveException {
-    Table t = newTable(tblName);
-    return dropPartition(t.getDbName(), t.getTableName(), part_vals, deleteData);
+      throws HiveException {
+    String[] names = Utilities.getDbTableName(tblName);
+    return dropPartition(names[0], names[1], part_vals, deleteData);
   }
 
   public boolean dropPartition(String db_name, String tbl_name,
@@ -1705,9 +1704,9 @@ public boolean dropPartition(String db_name, String tbl_name,
 
   public List<Partition> dropPartitions(String tblName, List<DropTableDesc.PartSpec> partSpecs,
       boolean deleteData, boolean ignoreProtection, boolean ifExists) throws HiveException {
-    Table t = newTable(tblName);
+    String[] names = Utilities.getDbTableName(tblName);
     return dropPartitions(
-        t.getDbName(), t.getTableName(), partSpecs, deleteData, ignoreProtection, ifExists);
+        names[0], names[1], partSpecs, deleteData, ignoreProtection, ifExists);
   }
 
   public List<Partition> dropPartitions(String dbName, String tblName,
@@ -1732,8 +1731,8 @@ public boolean dropPartition(String db_name, String tbl_name,
   }
 
   public List<String> getPartitionNames(String tblName, short max) throws HiveException {
-    Table t = newTable(tblName);
-    return getPartitionNames(t.getDbName(), t.getTableName(), max);
+    String[] names = Utilities.getDbTableName(tblName);
+    return getPartitionNames(names[0], names[1], max);
   }
 
   public List<String> getPartitionNames(String dbName, String tblName, short max)
@@ -2210,7 +2209,7 @@ public boolean accept(Path p) {
       // on "_copy_N" where N starts at 1 and works its way up until
       // we find a free space.
 
-      // removed source file staging.. it's more confusing when faild.
+      // removed source file staging.. it's more confusing when failed.
       for (int counter = 1; fs.exists(itemDest) || destExists(result, itemDest); counter++) {
         itemDest = new Path(destf, name + ("_copy_" + counter) + filetype);
       }
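None of these Hive.java signatures change, so callers can keep passing either bare or qualified names; only the internal name resolution differs. A hypothetical caller for illustration (the sales.orders table is invented, not from the patch):

    // Hypothetical usage; observable behavior is the same before and after
    // the patch, which only avoids building a temporary Table per call.
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class QualifiedNameCaller {
      public static void main(String[] args) throws Exception {
        Hive db = Hive.get(new HiveConf());
        Table qualified = db.getTable("sales.orders"); // split into (sales, orders)
        Table bare = db.getTable("orders");            // resolved in the current db
        db.dropTable("sales.orders");                  // same splitting path
      }
    }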
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 399f92a..70ec577 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -98,7 +98,6 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
-import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -1221,10 +1220,8 @@ public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException
 
     for (String alias : tabAliases) {
       String tab_name = qb.getTabNameForAlias(alias);
-      Table tab = null;
-      try {
-        tab = db.getTable(tab_name);
-      } catch (InvalidTableException ite) {
+      Table tab = db.getTable(tab_name, false);
+      if (tab == null) {
        /*
         * if this s a CTE reference:
         * Add its AST as a SubQuery to this QB.
@@ -1418,11 +1415,11 @@ public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException
 
       // allocate a temporary output dir on the location of the table
       String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-      Table newTable = db.newTable(tableName);
+      String[] names = Utilities.getDbTableName(tableName);
       Path location;
       try {
         Warehouse wh = new Warehouse(conf);
-        location = wh.getDatabasePath(db.getDatabase(newTable.getDbName()));
+        location = wh.getDatabasePath(db.getDatabase(names[0]));
       } catch (MetaException e) {
         throw new SemanticException(e);
       }
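The getMetaData hunk at line 1221 is the one behavioral refactor in this file: table resolution switches from exception-driven control flow to a null check. getTable(name, false) returns null for a missing table instead of throwing InvalidTableException, which is why that import can be dropped. Side by side, compressed from the hunk above:

    // Before: resolve-or-catch
    Table tab = null;
    try {
      tab = db.getTable(tab_name);
    } catch (InvalidTableException ite) {
      // fall through: the name may be a CTE reference
    }

    // After: resolve-or-null
    Table tab = db.getTable(tab_name, false);
    if (tab == null) {
      // not a metastore table: treat the name as a CTE reference
    }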
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index b9890af..a8d9a15 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
@@ -170,17 +169,15 @@ public void compile(final ParseContext pCtx, final List
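Across all four files the refactor rests on the contract of getDbTableName, which a small test pins down. This sketch exercises the assumed helper from earlier, not a test shipped with the patch:

    import static org.junit.Assert.assertArrayEquals;
    import org.junit.Test;

    public class DbTableNameSketchTest {
      @Test
      public void qualifiedNameSplitsOnTheDot() {
        assertArrayEquals(new String[] { "sales", "orders" },
            DbTableNameSketch.getDbTableName("default", "sales.orders"));
      }

      @Test
      public void bareNameFallsBackToCurrentDatabase() {
        assertArrayEquals(new String[] { "default", "orders" },
            DbTableNameSketch.getDbTableName("default", "orders"));
      }

      @Test(expected = IllegalArgumentException.class)
      public void multiDotNameIsRejected() {
        DbTableNameSketch.getDbTableName("default", "a.b.c");
      }
    }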