diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java index 8105e8ba54..919ad7c3d2 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java @@ -348,8 +348,8 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive AlterTableDesc alterTable = work.getAlterTblDesc(); if (alterTable != null) { - Table table = hive.getTable(SessionState.get().getCurrentDatabase(), - Utilities.getDbTableName(alterTable.getOldName())[1], false); + final String tableName = Utilities.getTableName(alterTable.getOldName()).getTable(); + Table table = hive.getTable(SessionState.get().getCurrentDatabase(), tableName, false); Partition part = null; if (alterTable.getPartSpec() != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index cac14a6ab8..00ef210170 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -46,6 +46,7 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.ValidWriteIdList; @@ -1641,8 +1642,8 @@ private void acquireLocks() throws CommandProcessorResponse { fsd1.getDirName().compareTo(fsd2.getDirName())); for (FileSinkDesc desc : acidSinks) { TableDesc tableInfo = desc.getTableInfo(); - long writeId = queryTxnMgr.getTableWriteId(Utilities.getDatabaseName(tableInfo.getTableName()), - Utilities.getTableName(tableInfo.getTableName())); + final TableName tn = Utilities.getTableName(tableInfo.getTableName()); + long writeId = queryTxnMgr.getTableWriteId(tn.getDb(), tn.getTable()); desc.setTableWriteId(writeId); /** @@ -1671,9 +1672,8 @@ private void acquireLocks() throws CommandProcessorResponse { DDLDescWithWriteId acidDdlDesc = plan.getAcidDdlDesc(); boolean hasAcidDdl = acidDdlDesc != null && acidDdlDesc.mayNeedWriteId(); if (hasAcidDdl) { - String fqTableName = acidDdlDesc.getFullTableName(); - long writeId = queryTxnMgr.getTableWriteId( - Utilities.getDatabaseName(fqTableName), Utilities.getTableName(fqTableName)); + final TableName tn = Utilities.getTableName(acidDdlDesc.getFullTableName()); + long writeId = queryTxnMgr.getTableWriteId(tn.getDb(), tn.getTable()); acidDdlDesc.setWriteId(writeId); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index cb7fdf73b5..964184ebdb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -62,6 +62,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -1342,8 +1343,7 @@ private int renamePartition(Hive db, RenamePartitionDesc renamePartitionDesc) th 
return 0; } - String names[] = Utilities.getDbTableName(tableName); - if (Utils.isBootstrapDumpInProgress(db, names[0])) { + if (Utils.isBootstrapDumpInProgress(db, Utilities.getTableName(tableName).getDb())) { LOG.error("DDLTask: Rename Partition not allowed as bootstrap dump in progress"); throw new HiveException("Rename Partition: Not allowed as bootstrap dump in progress"); } @@ -2159,10 +2159,10 @@ private int msck(Hive db, MsckDesc msckDesc) { try { msck = new Msck( false, false); msck.init(db.getConf()); - String[] names = Utilities.getDbTableName(msckDesc.getTableName()); - MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), names[0], - names[1], msckDesc.getPartSpecs(), msckDesc.getResFile(), - msckDesc.isRepairPartitions(), msckDesc.isAddPartitions(), msckDesc.isDropPartitions(), -1); + final TableName tableName = Utilities.getTableName(msckDesc.getTableName()); + MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), tableName.getDb(), tableName.getTable(), + msckDesc.getPartSpecs(), msckDesc.getResFile(), msckDesc.isRepairPartitions(), msckDesc.isAddPartitions(), + msckDesc.isDropPartitions(), -1); return msck.repair(msckInfo); } catch (MetaException e) { LOG.error("Unable to create msck instance.", e); @@ -3465,7 +3465,8 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException, // when column name is specified in describe table DDL, colPath will // will be table_name.column_name String colName = colPath.split("\\.")[1]; - String[] dbTab = Utilities.getDbTableName(tableName); + final TableName tNameObj = Utilities.getTableName(tableName); + String[] dbTabLower = new String[] { tNameObj.getDb().toLowerCase(), tNameObj.getTable().toLowerCase() }; List colNames = new ArrayList(); colNames.add(colName.toLowerCase()); if (null == part) { @@ -3487,9 +3488,8 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException, StatsSetupConst.setColumnStatsState(tblProps, colNames); } else { cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - List parts = db.getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), (short) -1); - AggrStats aggrStats = db.getAggrColStatsFor( - dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false); + List parts = db.getPartitionNames(dbTabLower[0], dbTabLower[1], (short) -1); + AggrStats aggrStats = db.getAggrColStatsFor(dbTabLower[0], dbTabLower[1], colNames, parts, false); colStats = aggrStats.getColStats(); if (parts.size() == aggrStats.getPartsFound()) { StatsSetupConst.setColumnStatsState(tblProps, colNames); @@ -3500,15 +3500,14 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException, tbl.setParameters(tblProps); } else { cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - colStats = db.getTableColumnStatistics( - dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, false); + colStats = db.getTableColumnStatistics(dbTabLower[0], dbTabLower[1], colNames, false); } } else { List partitions = new ArrayList(); partitions.add(part.getName()); cols = Hive.getFieldsFromDeserializer(colPath, deserializer); - colStats = db.getPartitionColumnStatistics(dbTab[0].toLowerCase(), - dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName()); + colStats = db.getPartitionColumnStatistics(dbTabLower[0], dbTabLower[1], partitions, colNames, false) + .get(part.getName()); } } else { cols = Hive.getFieldsFromDeserializer(colPath, deserializer); @@ -3681,8 +3680,7 @@ static StringBuilder 
appendNonNull(StringBuilder builder, Object value, boolean */ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException { if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { - String names[] = Utilities.getDbTableName(alterTbl.getOldName()); - if (Utils.isBootstrapDumpInProgress(db, names[0])) { + if (Utils.isBootstrapDumpInProgress(db, Utilities.getTableName(alterTbl.getOldName()).getDb())) { LOG.error("DDLTask: Rename Table not allowed as bootstrap dump in progress"); throw new HiveException("Rename Table: Not allowed as bootstrap dump in progress"); } @@ -3842,8 +3840,9 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition } if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { - tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName())); - tbl.setTableName(Utilities.getTableName(alterTbl.getNewName())); + final TableName tn = Utilities.getTableName(alterTbl.getNewName()); + tbl.setDbName(tn.getDb()); + tbl.setTableName(tn.getTable()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) { StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); String serializationLib = sd.getSerdeInfo().getSerializationLib(); @@ -4300,9 +4299,8 @@ private void checkMmLb(Partition part) throws HiveException { private int dropConstraint(Hive db, AlterTableDesc alterTbl) throws SemanticException, HiveException { try { - db.dropConstraint(Utilities.getDatabaseName(alterTbl.getOldName()), - Utilities.getTableName(alterTbl.getOldName()), - alterTbl.getConstraintName()); + final TableName tn = Utilities.getTableName(alterTbl.getOldName()); + db.dropConstraint(tn.getDb(), tn.getTable(), alterTbl.getConstraintName()); } catch (NoSuchObjectException e) { throw new HiveException(e); } @@ -4843,10 +4841,10 @@ private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws Exceptio // find out database name and table name of target table String targetTableName = crtTbl.getTableName(); - String[] names = Utilities.getDbTableName(targetTableName); - tbl.setDbName(names[0]); - tbl.setTableName(names[1]); + final TableName tableName = Utilities.getTableName(targetTableName); + tbl.setDbName(tableName.getDb()); + tbl.setTableName(tableName.getTable()); // using old table object, hence reset the owner to current user for new table. tbl.setOwner(SessionState.getUserFromAuthenticator()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 61e34308bc..9260d52960 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -98,6 +98,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.StringInternUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -2211,59 +2212,19 @@ public static String formatBinaryString(byte[] array, int start, int length) { } /** - * Extract db and table name from dbtable string, where db and table are separated by "." 
- * If there is no db name part, set the current sessions default db * @param dbtable * @return String array with two elements, first is db name, second is table name * @throws HiveException */ - public static String[] getDbTableName(String dbtable) throws SemanticException { - return getDbTableName(SessionState.get().getCurrentDatabase(), dbtable); - } - - public static String[] getDbTableName(String defaultDb, String dbtable) throws SemanticException { - if (dbtable == null) { - return new String[2]; - } - String[] names = dbtable.split("\\."); - switch (names.length) { - case 2: - return names; - case 1: - return new String [] {defaultDb, dbtable}; - default: - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbtable); - } - } - - /** - * Accepts qualified name which is in the form of dbname.tablename and returns dbname from it - * - * @param dbTableName - * @return dbname - * @throws SemanticException input string is not qualified name - */ - public static String getDatabaseName(String dbTableName) throws SemanticException { - String[] split = dbTableName.split("\\."); - if (split.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbTableName); - } - return split[0]; - } - - /** - * Accepts qualified name which is in the form of dbname.tablename and returns tablename from it + * Accepts a qualified name in the form of tablename, dbname.tablename or catalog.dbname.tablename and returns a {@link TableName}. * - * @param dbTableName - * @return tablename - * @throws SemanticException input string is not qualified name + * @param dbTableName the possibly qualified table name, not null + * @return a {@link TableName} + * @throws SemanticException if dbTableName is null or cannot be parsed into a valid table name */ - public static String getTableName(String dbTableName) throws SemanticException { - String[] split = dbTableName.split("\\."); - if (split.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbTableName); + public static TableName getTableName(String dbTableName) throws SemanticException { + try { + return TableName + .fromString(dbTableName, SessionState.get().getCurrentCatalog(), SessionState.get().getCurrentDatabase()); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); } - return split[1]; } public static void validateColumnNames(List colNames, List checkCols) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 5cfd0a853a..ad77167563 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -126,6 +126,7 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAugmentMaterializationRule; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.DropTableDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; @@ -655,15 +656,15 @@ public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environ public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext, boolean transactional) throws HiveException { - String[] names = Utilities.getDbTableName(fullyQlfdTblName); - alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional); + final TableName tName =
Utilities.getTableName(fullyQlfdTblName); + alterTable(tName.getCat(), tName.getDb(), tName.getTable(), newTbl, false, environmentContext, transactional); } public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade, EnvironmentContext environmentContext, boolean transactional) throws HiveException { - String[] names = Utilities.getDbTableName(fullyQlfdTblName); - alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional); + final TableName tName = Utilities.getTableName(fullyQlfdTblName); + alterTable(tName.getCat(), tName.getDb(), tName.getTable(), newTbl, cascade, environmentContext, transactional); } public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade, @@ -743,8 +744,8 @@ public void updateCreationMetadata(String dbName, String tableName, CreationMeta public void alterPartition(String tblName, Partition newPart, EnvironmentContext environmentContext, boolean transactional) throws InvalidOperationException, HiveException { - String[] names = Utilities.getDbTableName(tblName); - alterPartition(null, names[0], names[1], newPart, environmentContext, transactional); + final TableName tn = Utilities.getTableName(tblName); + alterPartition(tn.getCat(), tn.getDb(), tn.getTable(), newPart, environmentContext, transactional); } /** @@ -824,7 +825,7 @@ private void validatePartition(Partition newPart) throws HiveException { public void alterPartitions(String tblName, List newParts, EnvironmentContext environmentContext, boolean transactional) throws InvalidOperationException, HiveException { - String[] names = Utilities.getDbTableName(tblName); + final TableName tn = Utilities.getTableName(tblName); List newTParts = new ArrayList(); try { @@ -844,7 +845,7 @@ public void alterPartitions(String tblName, List newParts, } newTParts.add(tmpPart.getTPartition()); } - getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext, + getMSC().alter_partitions(tn.getDb(), tn.getTable(), newTParts, environmentContext, tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null, tableSnapshot != null ? tableSnapshot.getWriteId() : -1); } catch (MetaException e) { @@ -1042,8 +1043,8 @@ public void createTable(Table tbl, boolean ifNotExists) throws HiveException { * thrown if the drop fails */ public void dropTable(String tableName, boolean ifPurge) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - dropTable(names[0], names[1], true, true, ifPurge); + final TableName tn = Utilities.getTableName(tableName); + dropTable(tn.getDb(), tn.getTable(), true, true, ifPurge); } /** @@ -1184,8 +1185,8 @@ public Table getTable(final String tableName) throws HiveException { * table doesn't exist */ public Table getTable(final String tableName, boolean throwException) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - return this.getTable(names[0], names[1], throwException); + final TableName tn = Utilities.getTableName(tableName); + return this.getTable(tn.getDb(), tn.getTable(), throwException); } /** @@ -1200,13 +1201,14 @@ public Table getTable(final String tableName, boolean throwException) throws Hiv * if there's an internal error or if the table doesn't exist */ public Table getTable(final String dbName, final String tableName) throws HiveException { - // TODO: catalog... 
etc everywhere - if (tableName.contains(".")) { - String[] names = Utilities.getDbTableName(tableName); - return this.getTable(names[0], names[1], true); - } else { - return this.getTable(dbName, tableName, true); + final TableName tn; + try { + tn = TableName.fromString(tableName, SessionState.get().getCurrentCatalog(), dbName); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); } + // TODO: catalog... etc everywhere + return this.getTable(tn.getDb(), tn.getTable(), true); } /** @@ -1525,10 +1527,10 @@ public Table apply(org.apache.hadoop.hive.metastore.api.Table table) { } } - public List getValidMaterializedView(String dbName, String materializedViewName, - List tablesUsed, boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException { - return getValidMaterializedViews(dbName, ImmutableList.of(materializedViewName), - tablesUsed, forceMVContentsUpToDate, txnMgr); + public List getValidMaterializedView(TableName tableName, List tablesUsed, + boolean forceMVContentsUpToDate, HiveTxnManager txnMgr) throws HiveException { + return getValidMaterializedViews(tableName.getDb(), ImmutableList.of(tableName.getTable()), tablesUsed, + forceMVContentsUpToDate, txnMgr); } private List getValidMaterializedViews(String dbName, List materializedViewNames, @@ -3149,10 +3151,6 @@ private void alterPartitionSpec(Table tbl, String partPath) throws HiveException, InvalidOperationException { alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath); - String fullName = tbl.getTableName(); - if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) { - fullName = tbl.getFullyQualifiedName(); - } alterPartition(tbl.getCatalogName(), tbl.getDbName(), tbl.getTableName(), new Partition(tbl, tpart), null, true); } @@ -3317,8 +3315,8 @@ private static void addInsertNonDirectoryInformation(Path p, FileSystem fileSyst public boolean dropPartition(String tblName, List part_vals, boolean deleteData) throws HiveException { - String[] names = Utilities.getDbTableName(tblName); - return dropPartition(names[0], names[1], part_vals, deleteData); + final TableName tn = Utilities.getTableName(tblName); + return dropPartition(tn.getDb(), tn.getTable(), part_vals, deleteData); } public boolean dropPartition(String db_name, String tbl_name, @@ -3409,20 +3407,17 @@ public boolean dropPartition(String dbName, String tableName, List partV ++partSpecKey; } - String[] names = Utilities.getDbTableName(table.getFullyQualifiedName()); - return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists); + return dropPartitions(Utilities.getTableName(table.getFullyQualifiedName()), partSpecs, deleteData, ifExists); } public List dropPartitions(String tblName, List partSpecs, boolean deleteData, boolean ifExists) throws HiveException { - String[] names = Utilities.getDbTableName(tblName); - return dropPartitions(names[0], names[1], partSpecs, deleteData, ifExists); + return dropPartitions(Utilities.getTableName(tblName), partSpecs, deleteData, ifExists); } - public List dropPartitions(String dbName, String tblName, - List partSpecs, boolean deleteData, + public List dropPartitions(TableName tableName, List partSpecs, boolean deleteData, boolean ifExists) throws HiveException { - return dropPartitions(dbName, tblName, partSpecs, + return dropPartitions(tableName, partSpecs, PartitionDropOptions.instance() .deleteData(deleteData) .ifExists(ifExists)); @@ -3430,22 +3425,21 @@ public boolean dropPartition(String dbName, String tableName, List partV public 
List dropPartitions(String tblName, List partSpecs, PartitionDropOptions dropOptions) throws HiveException { - String[] names = Utilities.getDbTableName(tblName); - return dropPartitions(names[0], names[1], partSpecs, dropOptions); + return dropPartitions(Utilities.getTableName(tblName), partSpecs, dropOptions); } - public List dropPartitions(String dbName, String tblName, - List partSpecs, PartitionDropOptions dropOptions) throws HiveException { + public List dropPartitions(TableName tableName, List partSpecs, + PartitionDropOptions dropOptions) throws HiveException { try { - Table tbl = getTable(dbName, tblName); + Table tbl = getTable(tableName.getDb(), tableName.getTable()); List> partExprs = new ArrayList<>(partSpecs.size()); for (DropTableDesc.PartSpec partSpec : partSpecs) { partExprs.add(new org.apache.hadoop.hive.metastore.utils.ObjectPair<>(partSpec.getPrefixLength(), SerializationUtilities.serializeExpressionToKryo(partSpec.getPartSpec()))); } - List tParts = getMSC().dropPartitions( - dbName, tblName, partExprs, dropOptions); + List tParts = + getMSC().dropPartitions(tableName.getDb(), tableName.getTable(), partExprs, dropOptions); return convertFromMetastore(tbl, tParts); } catch (NoSuchObjectException e) { throw new HiveException("Partition or table doesn't exist.", e); @@ -3454,13 +3448,7 @@ public boolean dropPartition(String dbName, String tableName, List partV } } - public List getPartitionNames(String tblName, short max) throws HiveException { - String[] names = Utilities.getDbTableName(tblName); - return getPartitionNames(names[0], names[1], max); - } - - public List getPartitionNames(String dbName, String tblName, short max) - throws HiveException { + public List getPartitionNames(String dbName, String tblName, short max) throws HiveException { List names = null; try { names = getMSC().listPartitionNames(dbName, tblName, max); @@ -5138,8 +5126,8 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName, } public Table newTable(String tableName) throws HiveException { - String[] names = Utilities.getDbTableName(tableName); - return new Table(names[0], names[1]); + final TableName tn = Utilities.getTableName(tableName); + return new Table(tn.getDb(), tn.getTable()); } public String getDelegationToken(String owner, String renewer) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java index 1207be3028..58a657ad9a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java @@ -20,10 +20,10 @@ import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.session.SessionState; public class AnalyzeCommandUtils { public static boolean isPartitionLevelStats(ASTNode tree) { @@ -40,9 +40,8 @@ public static boolean isPartitionLevelStats(ASTNode tree) { public static Table getTable(ASTNode tree, BaseSemanticAnalyzer sa) throws SemanticException { String tableName = ColumnStatsSemanticAnalyzer.getUnescapedName((ASTNode) tree.getChild(0).getChild(0)); - String currentDb = SessionState.get().getCurrentDatabase(); - String [] names = Utilities.getDbTableName(currentDb, tableName); - return sa.getTable(names[0], names[1], true); + TableName tn = 
Utilities.getTableName(tableName); + return sa.getTable(tn.getDb(), tn.getTable(), true); } public static Map getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index e6779b24a5..130baebc8b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -38,11 +38,11 @@ import org.antlr.runtime.TokenRewriteStream; import org.antlr.runtime.tree.Tree; -import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; @@ -414,32 +414,42 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD return unescapeIdentifier(tableOrColumnNode.getText()); } - public static String[] getQualifiedTableName(ASTNode tabNameNode) throws SemanticException { - if (tabNameNode.getType() != HiveParser.TOK_TABNAME || - (tabNameNode.getChildCount() != 1 && tabNameNode.getChildCount() != 2)) { + /** + * Get the name reference of a DB table node. + * @param tabNameNode + * @return a {@link TableName}, not null. The catalog will be missing from this. + * @throws SemanticException + */ + public static TableName getQualifiedTableName(ASTNode tabNameNode) throws SemanticException { + // TODO: Ideally this would be removed, once the catalog is accessible in all use cases + return getQualifiedTableName(tabNameNode, null); + } + + /** + * Get the name reference of a DB table node. + * @param tabNameNode + * @param catalogName the catalog of the DB/object + * @return a {@link TableName}, not null, qualified with the given catalog name.
+ * @throws SemanticException + */ + public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalogName) throws SemanticException { + if (tabNameNode.getType() != HiveParser.TOK_TABNAME || (tabNameNode.getChildCount() != 1 + && tabNameNode.getChildCount() != 2)) { throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tabNameNode)); } if (tabNameNode.getChildCount() == 2) { - String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); - String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); + final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); if (dbName.contains(".") || tableName.contains(".")) { throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode)); } - return new String[] {dbName, tableName}; + return TableName.fromString(tableName, catalogName, dbName); } - String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); if (tableName.contains(".")) { throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode)); } - return Utilities.getDbTableName(tableName); - } - - public static String getDotName(String[] qname) throws SemanticException { - String genericName = StringUtils.join(qname, "."); - if (qname.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, genericName); - } - return genericName; + return TableName.fromString(tableName, catalogName, SessionState.get().getCurrentDatabase()); } /** @@ -704,112 +714,109 @@ private static String spliceString(String str, int i, int length, String replace /** * Process the primary keys from the ast node and populate the SQLPrimaryKey list. 
*/ - protected static void processPrimaryKeys(String databaseName, String tableName, - ASTNode child, List primaryKeys) throws SemanticException { + protected static void processPrimaryKeys(TableName tName, ASTNode child, List primaryKeys) + throws SemanticException { List primaryKeyInfos = new ArrayList(); generateConstraintInfos(child, primaryKeyInfos); - constraintInfosToPrimaryKeys(databaseName, tableName, primaryKeyInfos, primaryKeys); + constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); } - protected static void processPrimaryKeys(String databaseName, String tableName, - ASTNode child, List columnNames, List primaryKeys) - throws SemanticException { + protected static void processPrimaryKeys(TableName tName, ASTNode child, List columnNames, + List primaryKeys) throws SemanticException { List primaryKeyInfos = new ArrayList(); generateConstraintInfos(child, columnNames, primaryKeyInfos, null, null); - constraintInfosToPrimaryKeys(databaseName, tableName, primaryKeyInfos, primaryKeys); + constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); } - private static void constraintInfosToPrimaryKeys(String databaseName, String tableName, - List primaryKeyInfos, List primaryKeys) { + private static void constraintInfosToPrimaryKeys(TableName tName, List primaryKeyInfos, + List primaryKeys) { + // TODO: null check and throw int i = 1; for (ConstraintInfo primaryKeyInfo : primaryKeyInfos) { - primaryKeys.add(new SQLPrimaryKey(databaseName, tableName, primaryKeyInfo.colName, - i++, primaryKeyInfo.constraintName, primaryKeyInfo.enable, - primaryKeyInfo.validate, primaryKeyInfo.rely)); + primaryKeys.add( + new SQLPrimaryKey(tName.getDb(), tName.getTable(), primaryKeyInfo.colName, i++, primaryKeyInfo.constraintName, + primaryKeyInfo.enable, primaryKeyInfo.validate, primaryKeyInfo.rely)); } } /** * Process the unique constraints from the ast node and populate the SQLUniqueConstraint list. 
*/ - protected static void processUniqueConstraints(String catName, String databaseName, String tableName, - ASTNode child, List uniqueConstraints) throws SemanticException { + protected static void processUniqueConstraints(TableName tName, ASTNode child, + List uniqueConstraints) throws SemanticException { List uniqueInfos = new ArrayList(); generateConstraintInfos(child, uniqueInfos); - constraintInfosToUniqueConstraints(catName, databaseName, tableName, uniqueInfos, uniqueConstraints); + constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); } - protected static void processUniqueConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List uniqueConstraints) - throws SemanticException { + protected static void processUniqueConstraints(TableName tName, ASTNode child, List columnNames, + List uniqueConstraints) throws SemanticException { List uniqueInfos = new ArrayList(); generateConstraintInfos(child, columnNames, uniqueInfos, null, null); - constraintInfosToUniqueConstraints(catName, databaseName, tableName, uniqueInfos, uniqueConstraints); + constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); } - private static void constraintInfosToUniqueConstraints(String catName, String databaseName, String tableName, - List uniqueInfos, List uniqueConstraints) { + private static void constraintInfosToUniqueConstraints(TableName tName, List uniqueInfos, + List uniqueConstraints) { + // TODO: null check and throw int i = 1; for (ConstraintInfo uniqueInfo : uniqueInfos) { - uniqueConstraints.add(new SQLUniqueConstraint(catName, databaseName, tableName, uniqueInfo.colName, - i++, uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); + uniqueConstraints.add( + new SQLUniqueConstraint(tName.getCat(), tName.getDb(), tName.getTable(), uniqueInfo.colName, i++, + uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); } } - protected static void processCheckConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, - List checkConstraints, final ASTNode typeChild, - final TokenRewriteStream tokenRewriteStream) + protected static void processCheckConstraints(TableName tName, ASTNode child, List columnNames, + List checkConstraints, final ASTNode typeChild, final TokenRewriteStream tokenRewriteStream) throws SemanticException { List checkInfos = new ArrayList(); generateConstraintInfos(child, columnNames, checkInfos, typeChild, tokenRewriteStream); - constraintInfosToCheckConstraints(catName, databaseName, tableName, checkInfos, checkConstraints); + constraintInfosToCheckConstraints(tName, checkInfos, checkConstraints); } - private static void constraintInfosToCheckConstraints(String catName, String databaseName, String tableName, - List checkInfos, - List checkConstraints) { + private static void constraintInfosToCheckConstraints(TableName tName, List checkInfos, + List checkConstraints) { + // TODO: null check and throw for (ConstraintInfo checkInfo : checkInfos) { - checkConstraints.add(new SQLCheckConstraint(catName, databaseName, tableName, checkInfo.colName, - checkInfo.defaultValue, checkInfo.constraintName, checkInfo.enable, - checkInfo.validate, checkInfo.rely)); + checkConstraints.add(new SQLCheckConstraint(tName.getCat(), tName.getDb(), tName.getTable(), checkInfo.colName, + checkInfo.defaultValue, checkInfo.constraintName, checkInfo.enable, checkInfo.validate, checkInfo.rely)); } } - protected static void 
processDefaultConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List defaultConstraints, final ASTNode typeChild, - final TokenRewriteStream tokenRewriteStream) - throws SemanticException { + protected static void processDefaultConstraints(TableName tName, ASTNode child, List columnNames, + List defaultConstraints, final ASTNode typeChild, + final TokenRewriteStream tokenRewriteStream) throws SemanticException { List defaultInfos = new ArrayList(); generateConstraintInfos(child, columnNames, defaultInfos, typeChild, tokenRewriteStream); - constraintInfosToDefaultConstraints(catName, databaseName, tableName, defaultInfos, defaultConstraints); + constraintInfosToDefaultConstraints(tName, defaultInfos, defaultConstraints); } - private static void constraintInfosToDefaultConstraints( - String catName, String databaseName, String tableName, - List defaultInfos, List defaultConstraints) { + private static void constraintInfosToDefaultConstraints(TableName tName, List defaultInfos, + List defaultConstraints) { + // TODO: null check and throw for (ConstraintInfo defaultInfo : defaultInfos) { - defaultConstraints.add(new SQLDefaultConstraint(catName, databaseName, tableName, - defaultInfo.colName, defaultInfo.defaultValue, defaultInfo.constraintName, - defaultInfo.enable, defaultInfo.validate, defaultInfo.rely)); + defaultConstraints.add( + new SQLDefaultConstraint(tName.getCat(), tName.getDb(), tName.getTable(), defaultInfo.colName, + defaultInfo.defaultValue, defaultInfo.constraintName, defaultInfo.enable, defaultInfo.validate, + defaultInfo.rely)); } } - protected static void processNotNullConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List notNullConstraints) - throws SemanticException { + protected static void processNotNullConstraints(TableName tName, ASTNode child, List columnNames, + List notNullConstraints) throws SemanticException { List notNullInfos = new ArrayList(); generateConstraintInfos(child, columnNames, notNullInfos, null, null); - constraintInfosToNotNullConstraints(catName, databaseName, tableName, notNullInfos, notNullConstraints); + constraintInfosToNotNullConstraints(tName, notNullInfos, notNullConstraints); } - private static void constraintInfosToNotNullConstraints( - String catName, String databaseName, String tableName, List notNullInfos, + private static void constraintInfosToNotNullConstraints(TableName tName, List notNullInfos, List notNullConstraints) { + // TODO: null check and throw for (ConstraintInfo notNullInfo : notNullInfos) { - notNullConstraints.add(new SQLNotNullConstraint(catName, databaseName, tableName, - notNullInfo.colName, notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, - notNullInfo.rely)); + notNullConstraints.add( + new SQLNotNullConstraint(tName.getCat(), tName.getDb(), tName.getTable(), notNullInfo.colName, + notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, notNullInfo.rely)); } } @@ -1094,12 +1101,13 @@ else if(child.getToken().getType() == HiveParser.TOK_CHECK_CONSTRAINT) { /** * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list + * @param tName catalog/db/table name reference * @param child Foreign Key token node * @param foreignKeys SQLForeignKey list * @throws SemanticException */ - protected static void processForeignKeys(String databaseName, String tableName, - ASTNode child, List foreignKeys) throws SemanticException { + protected static void 
processForeignKeys(TableName tName, ASTNode child, List foreignKeys) + throws SemanticException { // The ANTLR grammar looks like : // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList // KW_REFERENCES tabName=tableName parCols=columnParenthesesList @@ -1157,16 +1165,16 @@ protected static void processForeignKeys(String databaseName, String tableName, " The number of foreign key columns should be same as number of parent key columns ")); } - String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex)); + final TableName parentTblName = getQualifiedTableName((ASTNode) child.getChild(ptIndex)); for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) { SQLForeignKey sqlForeignKey = new SQLForeignKey(); - sqlForeignKey.setFktable_db(databaseName); - sqlForeignKey.setFktable_name(tableName); + sqlForeignKey.setFktable_db(tName.getDb()); + sqlForeignKey.setFktable_name(tName.getTable()); Tree fkgrandChild = child.getChild(fkIndex).getChild(j); checkColumnName(fkgrandChild.getText()); sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase())); - sqlForeignKey.setPktable_db(parentDBTbl[0]); - sqlForeignKey.setPktable_name(parentDBTbl[1]); + sqlForeignKey.setPktable_db(parentTblName.getDb()); + sqlForeignKey.setPktable_name(parentTblName.getTable()); Tree pkgrandChild = child.getChild(pkIndex).getChild(j); sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase())); sqlForeignKey.setKey_seq(j+1); @@ -1221,34 +1229,33 @@ private static void checkColumnName(String columnName) throws SemanticException ASTNode child = (ASTNode) ast.getChild(i); switch (child.getToken().getType()) { case HiveParser.TOK_UNIQUE: { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child, - uniqueConstraints); + processUniqueConstraints(tName, child, uniqueConstraints); } break; case HiveParser.TOK_PRIMARY_KEY: { if (!primaryKeys.isEmpty()) { - throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg( - "Cannot exist more than one primary key definition for the same table")); + throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT + .getMsg("Cannot exist more than one primary key definition for the same table")); } - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); + processPrimaryKeys(tName, child, primaryKeys); } break; case HiveParser.TOK_FOREIGN_KEY: { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); + processForeignKeys(tName, child, foreignKeys); } break; case HiveParser.TOK_CHECK_CONSTRAINT: { + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child, null, + processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream); } break; @@ -1279,39 +1286,35 @@ private static void checkColumnName(String columnName) throws SemanticException constraintChild = (ASTNode) child.getChild(2); } if (constraintChild != null) { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); // Process column constraint switch (constraintChild.getToken().getType()) { case HiveParser.TOK_CHECK_CONSTRAINT: - processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), checkConstraints, typeChild, - tokenRewriteStream); + processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), checkConstraints, + typeChild, tokenRewriteStream); break; case HiveParser.TOK_DEFAULT_VALUE: - processDefaultConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream); + processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), defaultConstraints, + typeChild, tokenRewriteStream); break; case HiveParser.TOK_NOT_NULL: - processNotNullConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), notNullConstraints); + processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), notNullConstraints); break; case HiveParser.TOK_UNIQUE: - processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), uniqueConstraints); + processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: if (!primaryKeys.isEmpty()) { - throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg( - "Cannot exist more than one primary key definition for the same table")); + throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT + .getMsg("Cannot exist more than one primary key definition for the same table")); } - processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), primaryKeys); + processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: - processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, + processForeignKeys(tName, constraintChild, foreignKeys); break; default: @@ -2172,12 +2175,12 @@ protected Database getDatabase(String dbName, boolean throwException) throws Sem return database; } - protected Table getTable(String[] qualified) throws SemanticException { - return getTable(qualified[0], qualified[1], true); + protected Table getTable(TableName tn) throws SemanticException { + return getTable(tn, true); } - protected Table getTable(String[] qualified, boolean throwException) throws 
SemanticException { - return getTable(qualified[0], qualified[1], throwException); + protected Table getTable(TableName tn, boolean throwException) throws SemanticException { + return getTable(tn.getDb(), tn.getTable(), throwException); } protected Table getTable(String tblName) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index f5a1c74671..088ac46e0f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -2185,8 +2185,7 @@ private RelNode applyMaterializedViewRewriting(RelOptPlanner planner, RelNode ba // We only retrieve the materialization corresponding to the rebuild. In turn, // we pass 'true' for the forceMVContentsUpToDate parameter, as we cannot allow the // materialization contents to be stale for a rebuild if we want to use it. - materializations = db.getValidMaterializedView(mvRebuildDbName, mvRebuildName, - getTablesUsed(basePlan), true, getTxnMgr()); + materializations = db.getValidMaterializedView(mvRebuildName, getTablesUsed(basePlan), true, getTxnMgr()); } else { // This is not a rebuild, we retrieve all the materializations. In turn, we do not need // to force the materialization contents to be up-to-date, as this is not a rebuild, and diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index adfa4316c2..004bb3f6ec 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; @@ -281,11 +282,10 @@ public void analyzeInternal(ASTNode input) throws SemanticException { switch (ast.getType()) { case HiveParser.TOK_ALTERTABLE: { ast = (ASTNode) input.getChild(1); - String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) input.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - String tableName = getDotName(qualified); HashMap partSpec = null; ASTNode partSpecNode = (ASTNode)input.getChild(2); if (partSpecNode != null) { @@ -295,70 +295,70 @@ public void analyzeInternal(ASTNode input) throws SemanticException { if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { partSpec = getPartSpec(partSpecNode); } else { - partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); + partSpec = getValidatedPartSpec(getTable(tName.getTable()), partSpecNode, conf, false); } } if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) { - analyzeAlterTableRename(qualified, ast, false); + analyzeAlterTableRename(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { - analyzeAlterTableTouch(qualified, ast); + analyzeAlterTableTouch(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { - analyzeAlterTableArchive(qualified, ast, false); + analyzeAlterTableArchive(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { - analyzeAlterTableArchive(qualified, ast, true); + analyzeAlterTableArchive(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.ADDCOLS); + analyzeAlterTableModifyCols(tName, ast, partSpec, AlterTableTypes.ADDCOLS); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.REPLACECOLS); + analyzeAlterTableModifyCols(tName, ast, partSpec, AlterTableTypes.REPLACECOLS); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) { - analyzeAlterTableRenameCol(catName, qualified, ast, partSpec); + analyzeAlterTableRenameCol(tName, ast, partSpec); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { - analyzeAlterTableAddParts(qualified, ast, false); + analyzeAlterTableAddParts(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) { - analyzeAlterTableDropParts(qualified, ast, false); + analyzeAlterTableDropParts(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) { - analyzeAlterTablePartColType(qualified, ast); + analyzeAlterTablePartColType(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, false, false); + analyzeAlterTableProps(tName, null, ast, false, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, false, true); + analyzeAlterTableProps(tName, null, ast, false, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UPDATESTATS) { - analyzeAlterTableProps(qualified, partSpec, ast, false, false); + analyzeAlterTableProps(tName, partSpec, ast, false, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_SKEWED) { - analyzeAltertableSkewedby(qualified, ast); + analyzeAltertableSkewedby(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) { - analyzeExchangePartition(qualified, ast); + analyzeExchangePartition(tName, ast); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { - analyzeAlterTableFileFormat(ast, tableName, partSpec); + analyzeAlterTableFileFormat(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == 
HiveParser.TOK_ALTERTABLE_LOCATION) { - analyzeAlterTableLocation(ast, tableName, partSpec); + analyzeAlterTableLocation(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) { - analyzeAlterTablePartMergeFiles(ast, tableName, partSpec); + analyzeAlterTablePartMergeFiles(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) { - analyzeAlterTableSerde(ast, tableName, partSpec); + analyzeAlterTableSerde(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) { - analyzeAlterTableSerdeProps(ast, tableName, partSpec); + analyzeAlterTableSerdeProps(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { - analyzeAlterTableRenamePart(ast, tableName, partSpec); + analyzeAlterTableRenamePart(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) { - analyzeAlterTableSkewedLocation(ast, tableName, partSpec); + analyzeAlterTableSkewedLocation(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_BUCKETS) { - analyzeAlterTableBucketNum(ast, tableName, partSpec); + analyzeAlterTableBucketNum(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { - analyzeAlterTableClusterSort(ast, tableName, partSpec); + analyzeAlterTableClusterSort(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_COMPACT) { - analyzeAlterTableCompact(ast, tableName, partSpec); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS){ - analyzeAlterTableUpdateStats(ast, tableName, partSpec); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) { - analyzeAlterTableDropConstraint(ast, tableName); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) { - analyzeAlterTableAddConstraint(ast, tableName); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) { - analyzeAlterTableUpdateColumns(ast, tableName, partSpec); + analyzeAlterTableCompact(ast, tName.getTable(), partSpec); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) { + analyzeAlterTableUpdateStats(ast, tName.getTable(), partSpec); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) { + analyzeAlterTableDropConstraint(ast, tName.getTable()); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) { + analyzeAlterTableAddConstraint(ast, tName.getTable()); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) { + analyzeAlterTableUpdateColumns(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_OWNER) { - analyzeAlterTableOwner(ast, tableName); + analyzeAlterTableOwner(ast, tName.getTable()); } break; } @@ -449,28 +449,27 @@ public void analyzeInternal(ASTNode input) throws SemanticException { analyzeDropTable(ast, TableType.MATERIALIZED_VIEW); break; case HiveParser.TOK_ALTERVIEW: { - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + final TableName tName = getQualifiedTableName((ASTNode) ast.getChild(0)); ast = (ASTNode) ast.getChild(1); if (ast.getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, true, 
false); + analyzeAlterTableProps(tName, null, ast, true, false); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, true, true); + analyzeAlterTableProps(tName, null, ast, true, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_ADDPARTS) { - analyzeAlterTableAddParts(qualified, ast, true); + analyzeAlterTableAddParts(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPARTS) { - analyzeAlterTableDropParts(qualified, ast, true); + analyzeAlterTableDropParts(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_RENAME) { - analyzeAlterTableRename(qualified, ast, true); + analyzeAlterTableRename(tName, ast, true); } break; } case HiveParser.TOK_ALTER_MATERIALIZED_VIEW: { ast = (ASTNode) input.getChild(1); - String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); - String tableName = getDotName(qualified); + final TableName tName = getQualifiedTableName((ASTNode) input.getChild(0)); if (ast.getType() == HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE) { - analyzeAlterMaterializedViewRewrite(tableName, ast); + analyzeAlterMaterializedViewRewrite(tName.getDbTable(), ast); } break; } @@ -816,8 +815,8 @@ private void analyzeAlterDatabaseLocation(ASTNode ast) throws SemanticException addAlterDbDesc(alterDesc); } - private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException { - Table destTable = getTable(qualified); + private void analyzeExchangePartition(TableName tName, ASTNode ast) throws SemanticException { + Table destTable = getTable(tName); Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); // Get the partition specs @@ -1708,10 +1707,9 @@ private boolean hasConstraintsEnabled(final String tblName) throws SemanticExcep return false; } - private void analyzeAlterTableProps(String[] qualified, HashMap partSpec, - ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException { + private void analyzeAlterTableProps(TableName tableName, HashMap partSpec, ASTNode ast, + boolean expectView, boolean isUnset) throws SemanticException { - String tableName = getDotName(qualified); HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) .getChild(0)); EnvironmentContext environmentContext = null; @@ -1735,10 +1733,10 @@ private void analyzeAlterTableProps(String[] qualified, HashMap } // if table is being modified to be external we need to make sure existing table // doesn't have enabled constraint since constraints are disallowed with such tables - else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ - if(hasConstraintsEnabled(qualified[1])){ + else if (entry.getKey().equals("external") && entry.getValue().equals("true")) { + if (hasConstraintsEnabled(tableName.getTable())) { throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName + " has constraints enabled." + ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName.getDbTable() + " has constraints enabled." 
+ "Please remove those constraints to change this property.")); } } @@ -1770,18 +1768,16 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ } alterTblDesc.setProps(mapProp); alterTblDesc.setEnvironmentContext(environmentContext); - alterTblDesc.setOldName(tableName); - - + alterTblDesc.setOldName(tableName.getDbTable()); - boolean isToTxn = AcidUtils.isTablePropertyTransactional(mapProp) - || mapProp.containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, isToTxn); + boolean isToTxn = AcidUtils.isTablePropertyTransactional(mapProp) || mapProp + .containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); + addInputsOutputsAlterTable(tableName.getDbTable(), partSpec, alterTblDesc, isToTxn); // This special handling is because we cannot generate write ID for full ACID conversion, // it will break the weird 10000001-write-ID logic that is currently in use. However, we do // want to generate a write ID for prop changes for existing txn tables, or MM conversion. - boolean isAcidConversion = isToTxn && AcidUtils.isFullAcidTable(mapProp) - && !AcidUtils.isFullAcidTable(getTable(qualified, true)); + boolean isAcidConversion = + isToTxn && AcidUtils.isFullAcidTable(mapProp) && !AcidUtils.isFullAcidTable(getTable(tableName, true)); DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), alterTblDesc); if (isToTxn) { @@ -1790,7 +1786,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ ddlWork.setNeedLock(true); // Hmm... why don't many other operations here need locks? } if (changeStatsSucceeded) { - Table table = getTable(qualified, true); + Table table = getTable(tableName, true); if (AcidUtils.isTransactionalTable(table)) { alterTblDesc.setIsExplicitStatsUpdate(true); setAcidDdlDesc(alterTblDesc); @@ -2244,10 +2240,9 @@ private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName) private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException { ASTNode parent = (ASTNode) ast.getParent(); - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); ASTNode child = (ASTNode) ast.getChild(0); List primaryKeys = new ArrayList<>(); List foreignKeys = new ArrayList<>(); @@ -2256,21 +2251,17 @@ private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) switch (child.getToken().getType()) { case HiveParser.TOK_UNIQUE: - BaseSemanticAnalyzer.processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], - child, uniqueConstraints); + BaseSemanticAnalyzer.processUniqueConstraints(tName, child, uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: - BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], - child, primaryKeys); + BaseSemanticAnalyzer.processPrimaryKeys(tName, child, primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: - BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], - child, foreignKeys); + BaseSemanticAnalyzer.processForeignKeys(tName, child, foreignKeys); break; case HiveParser.TOK_CHECK_CONSTRAINT: - BaseSemanticAnalyzer.processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], - child, null, checkConstraints, child, - this.ctx.getTokenRewriteStream()); + BaseSemanticAnalyzer + .processCheckConstraints(tName, child, null, checkConstraints, child, this.ctx.getTokenRewriteStream()); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( @@ -2780,17 +2771,15 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { private void analyzeShowTableProperties(ASTNode ast) throws SemanticException { ShowTblPropertiesDesc showTblPropertiesDesc; - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + final TableName tName = getQualifiedTableName((ASTNode) ast.getChild(0)); String propertyName = null; if (ast.getChildCount() > 1) { propertyName = unescapeSQLString(ast.getChild(1).getText()); } - String tableNames = getDotName(qualified); - validateTable(tableNames, null); + validateTable(tName.getDbTable(), null); - showTblPropertiesDesc = new ShowTblPropertiesDesc(ctx.getResFile().toString(), tableNames, - propertyName); + showTblPropertiesDesc = new ShowTblPropertiesDesc(ctx.getResFile().toString(), tName.getDbTable(), propertyName); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblPropertiesDesc))); setFetchTask(createFetchTask(showTblPropertiesDesc.getSchema())); @@ -3188,25 +3177,22 @@ private void analyzeDescFunction(ASTNode ast) throws SemanticException { } - private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView) + private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expectView) throws SemanticException { - String[] target = getQualifiedTableName((ASTNode) ast.getChild(0)); + final TableName target = getQualifiedTableName((ASTNode) ast.getChild(0)); - String sourceName = getDotName(source); - String targetName = getDotName(target); - - AlterTableDesc alterTblDesc = new AlterTableDesc(sourceName, targetName, expectView, null); - Table table = getTable(sourceName, true); + AlterTableDesc alterTblDesc = new AlterTableDesc(source.getDbTable(), target.getDbTable(), expectView, null); + Table table = getTable(source.getDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); } - addInputsOutputsAlterTable(sourceName, null, alterTblDesc); + addInputsOutputsAlterTable(source.getDbTable(), null, alterTblDesc); 
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } - private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTNode ast, - HashMap partSpec) throws SemanticException { + private void analyzeAlterTableRenameCol(TableName tName, ASTNode ast, HashMap partSpec) + throws SemanticException { String newComment = null; boolean first = false; String flagCol = null; @@ -3249,35 +3235,29 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN switch (constraintChild.getToken().getType()) { case HiveParser.TOK_CHECK_CONSTRAINT: checkConstraints = new ArrayList<>(); - processCheckConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), checkConstraints, (ASTNode)ast.getChild(2), - this.ctx.getTokenRewriteStream()); + processCheckConstraints(tName, constraintChild, ImmutableList.of(newColName), checkConstraints, + (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_DEFAULT_VALUE: defaultConstraints = new ArrayList<>(); - processDefaultConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), defaultConstraints, (ASTNode)ast.getChild(2), - this.ctx.getTokenRewriteStream()); + processDefaultConstraints(tName, constraintChild, ImmutableList.of(newColName), defaultConstraints, + (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_NOT_NULL: notNullConstraints = new ArrayList<>(); - processNotNullConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), notNullConstraints); + processNotNullConstraints(tName, constraintChild, ImmutableList.of(newColName), notNullConstraints); break; case HiveParser.TOK_UNIQUE: uniqueConstraints = new ArrayList<>(); - processUniqueConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), uniqueConstraints); + processUniqueConstraints(tName, constraintChild, ImmutableList.of(newColName), uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: primaryKeys = new ArrayList<>(); - processPrimaryKeys(qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), primaryKeys); + processPrimaryKeys(tName, constraintChild, ImmutableList.of(newColName), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: foreignKeys = new ArrayList<>(); - processForeignKeys(qualified[0], qualified[1], constraintChild, - foreignKeys); + processForeignKeys(tName, constraintChild, foreignKeys); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( @@ -3286,7 +3266,7 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN } /* Validate the operation of renaming a column name. 
*/ - Table tab = getTable(qualified); + Table tab = getTable(tName); if(checkConstraints != null && !checkConstraints.isEmpty()) { validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf()); @@ -3307,21 +3287,18 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg()); } - String tblName = getDotName(qualified); AlterTableDesc alterTblDesc; if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade); + alterTblDesc = new AlterTableDesc(tName.getDbTable(), partSpec, unescapeIdentifier(oldColName), + unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade); } else { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade, - primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints); + alterTblDesc = new AlterTableDesc(tName.getDbTable(), partSpec, unescapeIdentifier(oldColName), + unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade, primaryKeys, foreignKeys, + uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints); } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); + addInputsOutputsAlterTable(tName.getDbTable(), partSpec, alterTblDesc); if (AcidUtils.isTransactionalTable(tab)) { // Note: we might actually need it only when certain changes (e.g. name or type?) are made. 
setAcidDdlDesc(alterTblDesc); @@ -3374,30 +3351,26 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, alterBucketNum))); } - private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, - HashMap partSpec, AlterTableTypes alterType) throws SemanticException { + private void analyzeAlterTableModifyCols(TableName tName, ASTNode ast, HashMap partSpec, + AlterTableTypes alterType) throws SemanticException { - String tblName = getDotName(qualified); List newCols = getColumns((ASTNode) ast.getChild(0)); boolean isCascade = false; if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) { isCascade = true; } - AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols, - alterType, isCascade); - Table table = getTable(tblName, true); + AlterTableDesc alterTblDesc = new AlterTableDesc(tName.getDbTable(), partSpec, newCols, alterType, isCascade); + Table table = getTable(tName.getDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + addInputsOutputsAlterTable(tName.getDbTable(), partSpec, alterTblDesc); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } - private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) - throws SemanticException { + private void analyzeAlterTableDropParts(TableName tName, ASTNode ast, boolean expectView) throws SemanticException { boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null) || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT); @@ -3414,7 +3387,7 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean Table tab = null; try { - tab = getTable(qualified); + tab = getTable(tName); } catch (SemanticException se){ if (replicationSpec.isInReplicationScope() && ( @@ -3448,17 +3421,15 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists); DropTableDesc dropTblDesc = - new DropTableDesc(getDotName(qualified), partSpecs, expectView ? TableType.VIRTUAL_VIEW : null, - mustPurge, replicationSpec); + new DropTableDesc(tName.getDbTable(), partSpecs, expectView ? TableType.VIRTUAL_VIEW : null, mustPurge, + replicationSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc))); } - private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) - throws SemanticException { - + private void analyzeAlterTablePartColType(TableName tName, ASTNode ast) throws SemanticException { // check if table exists. - Table tab = getTable(qualified); + Table tab = getTable(tName); inputs.add(new ReadEntity(tab)); // validate the DDL is a valid operation on the table. 
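The hunks above and below repeat one mechanical pattern: the positional String[] {db, table} pair and getDotName() are replaced by the TableName value object returned from getQualifiedTableName(). A minimal, self-contained sketch of that equivalence, not part of the patch; the "hive", "sales" and "orders" literals are invented for illustration, and the getDbTable() output is the expected behaviour rather than a quoted result:

import org.apache.hadoop.hive.common.TableName;

public class DotNameEquivalence {
  public static void main(String[] args) {
    // Old shape: a positional array joined by hand, as getDotName() used to do.
    String[] qualified = {"sales", "orders"};
    String dotName = String.join(".", qualified);

    // New shape: one TableName carries catalog, database and table together.
    TableName tName = new TableName("hive", qualified[0], qualified[1]);

    // getDbTable() stands in for the old "db.table" string at every call site.
    System.out.println(dotName.equals(tName.getDbTable()));   // expected: true
    System.out.println(tName.getDb() + " | " + tName.getTable());
  }
}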
@@ -3495,8 +3466,7 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(newCol.getName())); } - AlterTableAlterPartDesc alterTblAlterPartDesc = - new AlterTableAlterPartDesc(getDotName(qualified), newCol); + AlterTableAlterPartDesc alterTblAlterPartDesc = new AlterTableAlterPartDesc(tName.getDbTable(), newCol); if (AcidUtils.isTransactionalTable(tab)) { setAcidDdlDesc(alterTblAlterPartDesc); } @@ -3505,7 +3475,7 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) alterTblAlterPartDesc))); } - /** + /** * Add one or more partitions to a table. Useful when the data has been copied * to the right location by some other process. * @@ -3518,13 +3488,12 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) * @throws SemanticException * Parsing failed */ - private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView) - throws SemanticException { + private void analyzeAlterTableAddParts(TableName tName, CommonTree ast, boolean expectView) throws SemanticException { // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+) boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS; - Table tab = getTable(qualified); + Table tab = getTable(tName); boolean isView = tab.isView(); validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView); outputs.add(new WriteEntity(tab, @@ -3599,9 +3568,9 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole // Compile internal query to capture underlying table partition dependencies StringBuilder cmd = new StringBuilder(); cmd.append("SELECT * FROM "); - cmd.append(HiveUtils.unparseIdentifier(qualified[0])); + cmd.append(HiveUtils.unparseIdentifier(tName.getDb())); cmd.append("."); - cmd.append(HiveUtils.unparseIdentifier(qualified[1])); + cmd.append(HiveUtils.unparseIdentifier(tName.getTable())); cmd.append(" WHERE "); boolean firstOr = true; for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) { @@ -3704,10 +3673,9 @@ private void handleTransactionalTable(Table tab, AddPartitionDesc addPartitionDe * @throws SemanticException * Parsing failed */ - private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) - throws SemanticException { + private void analyzeAlterTableTouch(TableName tName, CommonTree ast) throws SemanticException { - Table tab = getTable(qualified); + Table tab = getTable(tName); validateAlterTableType(tab, AlterTableTypes.TOUCH); inputs.add(new ReadEntity(tab)); @@ -3716,7 +3684,7 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) if (partSpecs.size() == 0) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - getDotName(qualified), null, + tName.getDbTable(), null, AlterTableDesc.AlterTableTypes.TOUCH); outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_NO_LOCK)); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), @@ -3724,23 +3692,21 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) } else { addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK); for (Map partSpec : partSpecs) { - AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - getDotName(qualified), partSpec, - AlterTableDesc.AlterTableTypes.TOUCH); + AlterTableSimpleDesc touchDesc = + new AlterTableSimpleDesc(tName.getDbTable(), partSpec, AlterTableDesc.AlterTableTypes.TOUCH); 
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc))); } } } - private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive) - throws SemanticException { + private void analyzeAlterTableArchive(TableName tName, CommonTree ast, boolean isUnArchive) throws SemanticException { if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) { throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); } - Table tab = getTable(qualified); + Table tab = getTable(tName); // partition name to value List> partSpecs = getPartitionSpecs(tab, ast); @@ -3763,8 +3729,7 @@ private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolea } catch (HiveException e) { throw new SemanticException(e.getMessage(), e); } - AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc( - getDotName(qualified), partSpec, + AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(tName.getDbTable(), partSpec, (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE)); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc))); @@ -4137,35 +4102,32 @@ private void addTableDropPartsOutputs(Table tab, * node * @throws SemanticException */ - private void analyzeAltertableSkewedby(String[] qualified, ASTNode ast) throws SemanticException { + private void analyzeAltertableSkewedby(TableName tName, ASTNode ast) throws SemanticException { /** * Throw an error if the user tries to use the DDL with * hive.internal.ddl.list.bucketing.enable set to false. */ - HiveConf hiveConf = SessionState.get().getConf(); - - Table tab = getTable(qualified); + Table tab = getTable(tName); inputs.add(new ReadEntity(tab)); outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE)); validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY); - String tableName = getDotName(qualified); if (ast.getChildCount() == 0) { /* Convert a skewed table to non-skewed table. 
*/ - AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true, - new ArrayList(), new ArrayList>()); + AlterTableDesc alterTblDesc = + new AlterTableDesc(tName.getDbTable(), true, new ArrayList<>(), new ArrayList<>()); alterTblDesc.setStoredAsSubDirectories(false); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } else { switch (((ASTNode) ast.getChild(0)).getToken().getType()) { case HiveParser.TOK_TABLESKEWED: - handleAlterTableSkewedBy(ast, tableName, tab); + handleAlterTableSkewedBy(ast, tName.getDbTable(), tab); break; case HiveParser.TOK_STOREDASDIRS: - handleAlterTableDisableStoredAsDirs(tableName, tab); + handleAlterTableDisableStoredAsDirs(tName.getDbTable(), tab); break; default: assert false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java index 7a3c16390c..06e3fc557e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.LockState; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -27,7 +28,6 @@ import org.apache.hadoop.hive.ql.lockmgr.LockException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -54,12 +54,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { return; } - String[] qualifiedTableName = getQualifiedTableName((ASTNode) ast.getChild(0)); - String dbDotTable = getDotName(qualifiedTableName); + final TableName tableName = + getQualifiedTableName((ASTNode) ast.getChild(0), SessionState.get().getCurrentCatalog()); ASTNode rewrittenAST; // We need to go lookup the table and get the select statement and then parse it. 
try { - Table tab = getTableObjectByName(dbDotTable, true); + Table tab = getTableObjectByName(tableName.getDbTable(), true); if (!tab.isMaterializedView()) { // Cannot rebuild not materialized view throw new SemanticException(ErrorMsg.REBUILD_NO_MATERIALIZED_VIEW); @@ -71,9 +71,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { throw new SemanticException(ErrorMsg.MATERIALIZED_VIEW_DEF_EMPTY); } Context ctx = new Context(queryState.getConf()); - rewrittenAST = ParseUtils.parse("insert overwrite table " + - "`" + qualifiedTableName[0] + "`.`" + qualifiedTableName[1] + "` " + - viewText, ctx); + rewrittenAST = ParseUtils.parse(String.join("","insert overwrite table ", + "`", tableName.getDb(), "`.`", tableName.getTable(), "` ", viewText), ctx); this.ctx.addRewrittenStatementContext(ctx); if (!this.ctx.isExplainPlan() && AcidUtils.isTransactionalTable(tab)) { @@ -84,22 +83,21 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { LockState state; try { state = txnManager.acquireMaterializationRebuildLock( - qualifiedTableName[0], qualifiedTableName[1], txnManager.getCurrentTxnId()).getState(); + tableName.getDb(), tableName.getTable(), txnManager.getCurrentTxnId()).getState(); } catch (LockException e) { throw new SemanticException("Exception acquiring lock for rebuilding the materialized view", e); } if (state != LockState.ACQUIRED) { - throw new SemanticException("Another process is rebuilding the materialized view " + dbDotTable); + throw new SemanticException("Another process is rebuilding the materialized view " + tableName.getDbTable()); } } } catch (Exception e) { throw new SemanticException(e); } mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD; - mvRebuildDbName = qualifiedTableName[0]; - mvRebuildName = qualifiedTableName[1]; + mvRebuildName = tableName; - LOG.debug("Rebuilding materialized view " + dbDotTable); + LOG.debug("Rebuilding materialized view " + tableName.getDbTable()); super.analyzeInternal(rewrittenAST); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java index 33247f0745..31fde804de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java @@ -24,6 +24,7 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; @@ -176,27 +177,27 @@ protected Table getTargetTable(ASTNode tabRef) throws SemanticException { /** * @param throwException if false, return null if table doesn't exist, else throw */ - protected static Table getTable(ASTNode tabRef, Hive db, boolean throwException) throws SemanticException { - String[] tableName; - switch (tabRef.getType()) { + protected static Table getTable(ASTNode tabNode, Hive db, boolean throwException) throws SemanticException { + final TableName tName; + switch (tabNode.getType()) { case HiveParser.TOK_TABREF: - tableName = getQualifiedTableName((ASTNode) tabRef.getChild(0)); + tName = getQualifiedTableName((ASTNode) tabNode.getChild(0)); break; case HiveParser.TOK_TABNAME: - tableName = getQualifiedTableName(tabRef); + tName = getQualifiedTableName(tabNode); break; default: - throw raiseWrongType("TOK_TABREF|TOK_TABNAME", tabRef); + throw raiseWrongType("TOK_TABREF|TOK_TABNAME", 
tabNode); } Table mTable; try { - mTable = db.getTable(tableName[0], tableName[1], throwException); + mTable = db.getTable(tName.getDb(), tName.getTable(), throwException); } catch (InvalidTableException e) { - LOG.error("Failed to find table " + getDotName(tableName) + " got exception " + e.getMessage()); - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(getDotName(tableName)), e); + LOG.error("Failed to find table " + tName.getDbTable() + " got exception " + e.getMessage()); + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tName.getDbTable()), e); } catch (HiveException e) { - LOG.error("Failed to find table " + getDotName(tableName) + " got exception " + e.getMessage()); + LOG.error("Failed to find table " + tName.getDbTable() + " got exception " + e.getMessage()); throw new SemanticException(e.getMessage(), e); } return mTable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 8dc5b34a34..ea5774110a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -69,6 +69,7 @@ import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.common.StringInternUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; @@ -359,8 +360,7 @@ // whether this is a mv rebuild rewritten expression protected MaterializationRebuildMode mvRebuildMode = MaterializationRebuildMode.NONE; - protected String mvRebuildDbName; // Db name for materialization to rebuild - protected String mvRebuildName; // Name for materialization to rebuild + protected TableName mvRebuildName; // Table ref names for for materialization to rebuild protected volatile boolean disableJoinMerge = false; protected final boolean defaultJoinMerge; @@ -2304,14 +2304,13 @@ private void getMetaData(QB qb, ReadEntity parentInput) location = new Path(qb.getTableDesc().getLocation()); } else { // allocate a temporary output dir on the location of the table - String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - String[] names = Utilities.getDbTableName(tableName); + final TableName tName = Utilities.getTableName(getUnescapedName((ASTNode) ast.getChild(0))); try { Warehouse wh = new Warehouse(conf); //Use destination table's db location. String destTableDb = qb.getTableDesc() != null ? 
qb.getTableDesc().getDatabaseName() : null; if (destTableDb == null) { - destTableDb = names[0]; + destTableDb = tName.getDb(); } location = wh.getDatabasePath(db.getDatabase(destTableDb)); } catch (MetaException e) { @@ -6903,12 +6902,11 @@ private void genPartnCols(String dest, Operator input, QB qb, @SuppressWarnings("unchecked") private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException { - String qTableName = DDLSemanticAnalyzer.getDotName(new String[] { dbName, - tableName }); + final TableName tName = TableName.fromString(tableName,SessionState.get().getCurrentCatalog(),dbName); AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, null, false); HashMap mapProp = new HashMap<>(); mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); - alterTblDesc.setOldName(qTableName); + alterTblDesc.setOldName(tName.getDbTable()); alterTblDesc.setProps(mapProp); alterTblDesc.setDropIfExists(true); this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); @@ -7927,7 +7925,7 @@ private void handleLineage(LoadTableDesc ltd, Operator output) } else if ( queryState.getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) { Path tlocation = null; - String tName = Utilities.getDbTableName(tableDesc.getTableName())[1]; + final String tName = Utilities.getTableName(tableDesc.getTableName()).getTable(); try { Warehouse wh = new Warehouse(conf); tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()), @@ -12619,7 +12617,7 @@ protected void saveViewDefinition() throws SemanticException { sb.append(" FROM ("); sb.append(expandedText); sb.append(") "); - sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf)); + sb.append(HiveUtils.unparseIdentifier(Utilities.getTableName(createVwDesc.getViewName()).getTable(), conf)); expandedText = sb.toString(); } } else { @@ -12653,7 +12651,7 @@ protected void saveViewDefinition() throws SemanticException { sb.append(" FROM ("); sb.append(expandedText); sb.append(") "); - sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf)); + sb.append(HiveUtils.unparseIdentifier(Utilities.getTableName(createVwDesc.getViewName()).getTable(), conf)); expandedText = sb.toString(); } @@ -13174,8 +13172,7 @@ boolean hasConstraints(final List partCols, final List cols = new ArrayList(); @@ -13213,7 +13210,7 @@ ASTNode analyzeCreateTable( RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine()); + LOG.info("Creating table " + tName.getDbTable() + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); /* @@ -13377,7 +13374,7 @@ ASTNode analyzeCreateTable( // check for existence of table if (ifNotExists) { try { - Table table = getTable(qualifiedTabName, false); + Table table = getTable(tName, false); if (table != null) { // table exists return null; } @@ -13415,11 +13412,11 @@ ASTNode analyzeCreateTable( throw new SemanticException( "Partition columns can only declared using their name and types in regular CREATE TABLE statements"); } - tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps = 
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, tName.getDbTable(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(tName, TableType.MANAGED_TABLE, isTemporary, tblProps); - CreateTableDesc crtTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols, + CreateTableDesc crtTblDesc = new CreateTableDesc(tName.getDbTable(), isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, @@ -13440,14 +13437,14 @@ ASTNode analyzeCreateTable( case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { throw new SemanticException( - qualifiedTabName[1] + " cannot be declared transactional because it's an external table"); + tName.getTable() + " cannot be declared transactional because it's an external table"); } - tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, - isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, false, tblProps); + tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, tName.getDbTable(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(tName, TableType.MANAGED_TABLE, false, tblProps); CreateTableDesc crtTranTblDesc = - new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, + new CreateTableDesc(tName.getDbTable(), isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), @@ -13463,9 +13460,9 @@ ASTNode analyzeCreateTable( break; case CTLT: // create table like - tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, tName.getDbTable(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(tName, TableType.MANAGED_TABLE, isTemporary, tblProps); if (isTemporary) { Table likeTable = getTable(likeTableName, false); @@ -13474,7 +13471,7 @@ ASTNode analyzeCreateTable( + "and source table in CREATE TABLE LIKE is partitioned."); } } - CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, + CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tName.getDbTable(), isExt, isTemporary, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat); @@ -13486,27 +13483,25 @@ ASTNode analyzeCreateTable( if (isTemporary) { if (!ctx.isExplainSkipExecution() && !isMaterialization) { - String dbName = qualifiedTabName[0]; - String tblName = qualifiedTabName[1]; SessionState ss = SessionState.get(); if (ss == null) { - throw new SemanticException("No current SessionState, cannot create temporary table " - + dbName + "." 
+ tblName); + throw new SemanticException( + String.join("", "No current SessionState, cannot create temporary table ", tName.getDbTable())); } Map tables = SessionHiveMetaStoreClient. - getTempTablesForDatabase(dbName, tblName); - if (tables != null && tables.containsKey(tblName)) { - throw new SemanticException("Temporary table " + dbName + "." + tblName - + " already exists"); + getTempTablesForDatabase(tName.getDb(), tName.getTable()); + if (tables != null && tables.containsKey(tName.getTable())) { + throw new SemanticException(String.join("", "Temporary table ", tName.getDbTable(), " already exists")); } } } else { // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTab); - if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab)); + Table dumpTable = db.newTable(tName.getDbTable()); + if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx + .isExplainSkipExecution()) { + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tName.getDbTable())); } } catch (HiveException e) { throw new SemanticException(e); @@ -13548,9 +13543,9 @@ ASTNode analyzeCreateTable( } tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); - tableDesc = new CreateTableDesc(qualifiedTabName[0], dbDotTab, isExt, isTemporary, cols, + tblProps, isExt, storageFormat, tName.getDbTable(), sortCols, isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(tName, TableType.MANAGED_TABLE, isTemporary, tblProps); + tableDesc = new CreateTableDesc(tName.getDb(), tName.getTable(), isExt, isTemporary, cols, partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), @@ -13572,12 +13567,12 @@ ASTNode analyzeCreateTable( } /** Adds entities for create table/create view. 
*/ - private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, + private void addDbAndTabToOutputs(TableName tableName, TableType type, boolean isTemporary, Map tblProps) throws SemanticException { - Database database = getDatabase(qualifiedTabName[0]); + Database database = getDatabase(tableName.getDb()); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED)); - Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]); + Table t = new Table(tableName.getDb(), tableName.getTable()); t.setParameters(tblProps); t.setTableType(type); t.setTemporary(isTemporary); @@ -13585,8 +13580,7 @@ private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, } protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException { - String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); - String dbDotTable = getDotName(qualTabName); + final TableName tName = getQualifiedTableName((ASTNode) ast.getChild(0)); List cols = null; boolean ifNotExists = false; boolean rewriteEnabled = true; @@ -13602,7 +13596,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating view " + dbDotTable + " position=" + LOG.info("Creating view " + tName.getDbTable() + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { @@ -13670,9 +13664,9 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTable); + Table dumpTable = db.newTable(tName.getDbTable()); if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTable)); + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tName.getDbTable())); } } catch (HiveException e) { throw new SemanticException(e); @@ -13689,21 +13683,21 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt if (isMaterialized) { createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, + tName.getDbTable(), cols, comment, tblProps, partColNames, ifNotExists, isRebuild, rewriteEnabled, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); - addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW, false, tblProps); + addDbAndTabToOutputs(tName, TableType.MATERIALIZED_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); } else { createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, + tName.getDbTable(), cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createVwDesc))); - addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); + addDbAndTabToOutputs(tName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } qb.setViewDesc(createVwDesc); 
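Several call sites in the SemanticAnalyzer hunks above (for example handleLineage and saveViewDefinition), and more in TaskCompiler and the plan descriptors below, switch from Utilities.getDbTableName(...) to a Utilities.getTableName(String) helper whose body is not part of the hunks shown here. A rough sketch of what such a helper presumably does, inferred from the call sites and from the updated TestUtilities further down; the signature and the default-filling behaviour are assumptions, not the committed code:

// Hypothetical sketch; Utilities.getTableName itself does not appear in this diff.
public static TableName getTableName(String dbTableName) throws SemanticException {
  try {
    // Assumed: partially qualified names are completed from the current session,
    // mirroring what the removed getDbTableName() did for the database part.
    return TableName.fromString(dbTableName,
        SessionState.get().getCurrentCatalog(),
        SessionState.get().getCurrentDatabase());
  } catch (IllegalArgumentException e) {
    // TestUtilities#testGetTableName expects a SemanticException for null input.
    throw new SemanticException(e);
  }
}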
@@ -15210,10 +15204,10 @@ public boolean isValidQueryCaching() { protected String getFullTableNameForSQL(ASTNode n) throws SemanticException { switch (n.getType()) { case HiveParser.TOK_TABNAME: - String[] tableName = getQualifiedTableName(n); - return getDotName(new String[] { - HiveUtils.unparseIdentifier(tableName[0], this.conf), - HiveUtils.unparseIdentifier(tableName[1], this.conf) }); + final TableName initDbTbl = getQualifiedTableName(n); + final TableName tName = TableName.fromString(HiveUtils.unparseIdentifier(initDbTbl.getTable(), this.conf), + SessionState.get().getCurrentCatalog(), HiveUtils.unparseIdentifier(initDbTbl.getDb(), this.conf)); + return tName.getDbTable(); case HiveParser.TOK_TABREF: return getFullTableNameForSQL((ASTNode) n.getChild(0)); default: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 1ec5774178..185efa52e2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -24,6 +24,7 @@ import org.apache.commons.collections.*; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -450,12 +451,12 @@ private Path getDefaultCtasLocation(final ParseContext pCtx) throws SemanticExce } else if (pCtx.getQueryProperties().isMaterializedView()) { protoName = pCtx.getCreateViewDesc().getViewName(); } - String[] names = Utilities.getDbTableName(protoName); - if (!db.databaseExists(names[0])) { - throw new SemanticException("ERROR: The database " + names[0] + " does not exist."); + final TableName tn = Utilities.getTableName(protoName); + if (!db.databaseExists(tn.getDb())) { + throw new SemanticException("ERROR: The database " + tn.getDb() + " does not exist."); } Warehouse wh = new Warehouse(conf); - return wh.getDefaultTablePath(db.getDatabase(names[0]), names[1], isExternal); + return wh.getDefaultTablePath(db.getDatabase(tn.getDb()), tn.getTable(), isExternal); } catch (HiveException e) { throw new SemanticException(e); } catch (MetaException e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 18ed6fb418..ea42d8931a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -24,6 +24,7 @@ import java.util.Map; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -63,7 +64,8 @@ private final Hive db; public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { - this.db = db; + // TODO: Initialize Hive config e.g. 
to get catalog + this.db = db; } @Override @@ -242,8 +244,9 @@ protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticExcept ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { subject.setTable(true); - String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); - subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); + final TableName tName = BaseSemanticAnalyzer.getQualifiedTableName(gchild); + // TODO: handle catalog + subject.setObject(tName.getDbTable()); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java index 93641af215..212ada6f37 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java @@ -415,7 +415,7 @@ public String getOldName() { */ public void setOldName(String oldName) throws SemanticException { // Make sure we qualify the name from the outset so there's no ambiguity. - this.oldName = String.join(".", Utilities.getDbTableName(oldName)); + this.oldName = Utilities.getTableName(oldName).getDbTable(); } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java index c71ff6d713..bd9048c875 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java @@ -27,6 +27,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.PartitionManagementTask; import org.apache.hadoop.hive.metastore.TableType; @@ -719,16 +720,14 @@ public boolean isCTAS() { } public Table toTable(HiveConf conf) throws HiveException { - String databaseName = getDatabaseName(); - String tableName = getTableName(); - - if (databaseName == null || tableName.contains(".")) { - String[] names = Utilities.getDbTableName(tableName); - databaseName = names[0]; - tableName = names[1]; + final TableName tName; + try { + tName = TableName.fromString(getTableName(), null, getDatabaseName()); + } catch (IllegalArgumentException e) { + throw new HiveException(e.getCause()); } - Table tbl = new Table(databaseName, tableName); + Table tbl = new Table(tName.getDb(), tName.getTable()); if (getTblProps() != null) { tbl.getTTable().getParameters().putAll(getTblProps()); @@ -756,11 +755,10 @@ public Table toTable(HiveConf conf) throws HiveException { if (getSerName() == null) { if (storageHandler == null) { serDeClassName = PlanUtils.getDefaultSerDe().getName(); - LOG.info("Default to " + serDeClassName + " for table " + tableName); + LOG.info("Default to " + serDeClassName + " for table " + tName.getTable()); } else { serDeClassName = storageHandler.getSerDeClass().getName(); - LOG.info("Use StorageHandler-supplied " + serDeClassName - + " for table " + tableName); + LOG.info("Use StorageHandler-supplied " + serDeClassName + " for table " + tName.getTable()); } } else { // let's validate that the serde exists diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java index 7130aba597..430e091b7e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java @@ -25,10 +25,10 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.exec.DDLTask; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -332,11 +332,9 @@ public ReplicationSpec getReplicationSpec(){ } public Table toTable(HiveConf conf) throws HiveException { - String[] names = Utilities.getDbTableName(getViewName()); - String databaseName = names[0]; - String tableName = names[1]; + final TableName tName = Utilities.getTableName(getViewName()); - Table tbl = new Table(databaseName, tableName); + Table tbl = new Table(tName.getDb(), tName.getTable()); tbl.setViewOriginalText(getViewOriginalText()); tbl.setViewExpandedText(getViewExpandedText()); if (isMaterialized()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 5c30fca2d3..0a642ca904 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -24,6 +24,7 @@ import java.util.Map; import com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -35,9 +36,9 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.session.SessionState; /** * ImportTableDesc. @@ -89,10 +90,9 @@ public ImportTableDesc(String dbName, Table table) throws Exception { this.createTblDesc.setStoredAsSubDirectories(table.getSd().isStoredAsSubDirectories()); break; case VIEW: - String[] qualViewName = { dbName, table.getTableName() }; - String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName); + final TableName tName = TableName.fromString(table.getTableName(), SessionState.get().getCurrentCatalog(), dbName); if (table.isMaterializedView()) { - this.createViewDesc = new CreateViewDesc(dbDotView, + this.createViewDesc = new CreateViewDesc(tName.getDbTable(), table.getAllCols(), null, // comment passed as table params table.getParameters(), @@ -109,7 +109,7 @@ public ImportTableDesc(String dbName, Table table) throws Exception { this.createViewDesc.setTablesUsed(table.getCreationMetadata() != null ? 
table.getCreationMetadata().getTablesUsed() : ImmutableSet.of()); } else { - this.createViewDesc = new CreateViewDesc(dbDotView, + this.createViewDesc = new CreateViewDesc(tName.getDbTable(), table.getAllCols(), null, // comment passed as table params table.getParameters(), @@ -199,9 +199,8 @@ public void setTableName(String tableName) throws SemanticException { createTblDesc.setTableName(tableName); break; case VIEW: - String[] qualViewName = { dbName, tableName }; - String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName); - createViewDesc.setViewName(dbDotView); + final TableName tName = TableName.fromString(tableName, SessionState.get().getCurrentCatalog(), dbName); + createViewDesc.setViewName(tName.getDbTable()); break; } } @@ -212,8 +211,7 @@ public String getTableName() throws SemanticException { return createTblDesc.getTableName(); case VIEW: String dbDotView = createViewDesc.getViewName(); - String[] names = Utilities.getDbTableName(dbDotView); - return names[1]; // names[0] have the Db name and names[1] have the view name + return Utilities.getTableName(dbDotView).getTable(); } return null; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java index 853dcf8a81..0d64f9bbf5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.PrincipalDesc; @@ -64,7 +65,8 @@ public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjec dbTable = new String[] {null, null}; } else { if (privSubjectDesc.getTable()) { - dbTable = Utilities.getDbTableName(privSubjectDesc.getObject()); + final TableName tName = Utilities.getTableName(privSubjectDesc.getObject()); + dbTable = new String[] {tName.getDb(), tName.getTable()}; } else { dbTable = new String[] {privSubjectDesc.getObject(), null}; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index 305b467439..ab24917b59 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -59,6 +59,7 @@ import org.apache.hadoop.hive.ql.io.*; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; @@ -69,7 +70,6 @@ import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; @@ -131,31 +131,11 @@ public void testSerializeTimestamp() { } @Test - public void testgetDbTableName() 
throws HiveException{ - String tablename; - String [] dbtab; - SessionState.start(new HiveConf(this.getClass())); - String curDefaultdb = SessionState.get().getCurrentDatabase(); - - //test table without db portion - tablename = "tab1"; - dbtab = Utilities.getDbTableName(tablename); - assertEquals("db name", curDefaultdb, dbtab[0]); - assertEquals("table name", tablename, dbtab[1]); - - //test table with db portion - tablename = "dab1.tab1"; - dbtab = Utilities.getDbTableName(tablename); - assertEquals("db name", "dab1", dbtab[0]); - assertEquals("table name", "tab1", dbtab[1]); - - //test invalid table name - tablename = "dab1.tab1.x1"; + public void testGetTableName() throws HiveException{ try { - dbtab = Utilities.getDbTableName(tablename); + Utilities.getTableName(null); fail("exception was expected for invalid table name"); - } catch(HiveException ex){ - assertEquals("Invalid table name " + tablename, ex.getMessage()); + } catch(SemanticException ex){ } } diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index f5cb192561..89872ef5a3 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -22,29 +22,35 @@ * includes utilities for string parsing. */ public class TableName { + + /** Exception message thrown */ + private static final String ILL_ARG_EXCEPTION_MSG = + "Table name must be either , . " + "or .."; + + /** Catalog name */ private final String cat; private final String db; private final String table; /** * - * @param cat catalog name. Cannot be null. If you do not know it you can get it from + * @param catName catalog name. Cannot be null. If you do not know it you can get it from * SessionState.getCurrentCatalog() if you want to use the catalog from the current * session, or from MetaStoreUtils.getDefaultCatalog() if you do not have a session * or want to use the default catalog for the Hive instance. - * @param db database name. Cannot be null. If you do not now it you can get it from + * @param dbName database name. Cannot be null. If you do not now it you can get it from * SessionState.getCurrentDatabase() or use Warehouse.DEFAULT_DATABASE_NAME. - * @param table table name, cannot be null + * @param tableName table name, cannot be null */ - public TableName(String cat, String db, String table) { - this.cat = cat; - this.db = db; - this.table = table; + public TableName(final String catName, final String dbName, final String tableName) { + this.cat = catName; + this.db = dbName; + this.table = tableName; } /** * Build a TableName from a string of the form [[catalog.]database.]table. - * @param name name in string form + * @param name name in string form, not null * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not * know it you can get it from SessionState.getCurrentCatalog() if you * want to use the catalog from the current session, or from @@ -54,17 +60,21 @@ public TableName(String cat, String db, String table) { * not now it you can get it from SessionState.getCurrentDatabase() or * use Warehouse.DEFAULT_DATABASE_NAME. 
* @return TableName + * @throws IllegalArgumentException if a null or malformed name is given */ - public static TableName fromString(String name, String defaultCatalog, String defaultDatabase) { + public static TableName fromString(final String name, final String defaultCatalog, final String defaultDatabase) + throws IllegalArgumentException { + if (name == null) { + throw new IllegalArgumentException(String.join("", "Table value was null. ", ILL_ARG_EXCEPTION_MSG)); + } if (name.contains(DatabaseName.CAT_DB_TABLE_SEPARATOR)) { - String names[] = name.split("\\."); + String[] names = name.split("\\."); if (names.length == 2) { return new TableName(defaultCatalog, names[0], names[1]); } else if (names.length == 3) { return new TableName(names[0], names[1], names[2]); } else { - throw new RuntimeException("Table name must be either <table>, <db>.<table> " + - "or <cat>.<db>.<table>"); + throw new IllegalArgumentException(ILL_ARG_EXCEPTION_MSG); } } else { diff --git a/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java b/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java index 0a8cb2a82e..f3f00a63e7 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java +++ b/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java @@ -22,7 +22,7 @@ public class TestTableName { @Test - public void fullname() { + public void fullName() { TableName name = new TableName("cat", "db", "t"); Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); @@ -47,5 +47,12 @@ public void fromString() { Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); Assert.assertEquals("tab", name.getTable()); + + try { + TableName.fromString(null, null, null); + Assert.fail("Name can't be null"); + } catch (IllegalArgumentException e) { + Assert.assertTrue(true); + } } }
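To complement TestTableName above, a small runnable illustration of the parsing contract the new fromString implies. The "hive", "default", "sales" and "spark" literals are invented for the example, and the commented outputs are expectations inferred from the javadoc and tests rather than captured results:

import org.apache.hadoop.hive.common.TableName;

public class TableNameParsingDemo {
  public static void main(String[] args) {
    // One-part name: catalog and database fall back to the supplied defaults.
    TableName t1 = TableName.fromString("orders", "hive", "default");
    System.out.println(t1.getDbTable());                    // expected: default.orders

    // Two-part name: database comes from the string, catalog from the default.
    TableName t2 = TableName.fromString("sales.orders", "hive", "default");
    System.out.println(t2.getDb() + "." + t2.getTable());   // expected: sales.orders

    // Three-part name: fully qualified, the defaults are ignored.
    TableName t3 = TableName.fromString("spark.sales.orders", "hive", "default");
    System.out.println(t3.getCat());                        // expected: spark

    // Null input is now rejected up front instead of surfacing as a NullPointerException.
    try {
      TableName.fromString(null, "hive", "default");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}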