commit 2d8ec8a01d11a50a0870ab413a2e936c11790449 Author: David Lavati Date: Fri Apr 26 14:16:21 2019 +0200 HIVE-21198 Introduce a database object reference class Change-Id: I0e9c70971fe742205fd774480779f2d5502d99ac diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 4f14fa59c1..d211162043 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -46,6 +46,7 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.ValidWriteIdList; @@ -116,6 +117,7 @@ import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.ParseException; @@ -1641,8 +1643,8 @@ private void acquireLocks() throws CommandProcessorResponse { fsd1.getDirName().compareTo(fsd2.getDirName())); for (FileSinkDesc desc : acidSinks) { TableDesc tableInfo = desc.getTableInfo(); - long writeId = queryTxnMgr.getTableWriteId(Utilities.getDatabaseName(tableInfo.getTableName()), - Utilities.getTableName(tableInfo.getTableName())); + final TableName tn = HiveTableName.ofNullableWithNoDefault(tableInfo.getTableName()); + long writeId = queryTxnMgr.getTableWriteId(tn.getDb(), tn.getTable()); desc.setTableWriteId(writeId); /** @@ -1672,8 +1674,8 @@ private void acquireLocks() throws CommandProcessorResponse { boolean hasAcidDdl = acidDdlDesc != null && acidDdlDesc.mayNeedWriteId(); if (hasAcidDdl) { String fqTableName = acidDdlDesc.getFullTableName(); - long writeId = queryTxnMgr.getTableWriteId( - Utilities.getDatabaseName(fqTableName), Utilities.getTableName(fqTableName)); + final TableName tn = HiveTableName.ofNullableWithNoDefault(fqTableName); + long writeId = queryTxnMgr.getTableWriteId(tn.getDb(), tn.getTable()); acidDdlDesc.setWriteId(writeId); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java index 7ba1c2daef..ab287affcd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java @@ -20,6 +20,7 @@ import java.io.Serializable; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; @@ -39,10 +40,10 @@ } private final String resFile; - private final String tableName; + private final TableName tableName; private final String propertyName; - public ShowTablePropertiesDesc(String resFile, String tableName, String propertyName) { + public ShowTablePropertiesDesc(String resFile, TableName tableName, String propertyName) { this.resFile = resFile; this.tableName = tableName; this.propertyName = propertyName; @@ -59,7 +60,7 @@ public String getResFileString() { @Explain(displayName = "table name", explainLevels = 
{ Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getTableName() { - return tableName; + return tableName.getNotEmptyDbTable(); } @Explain(displayName = "property name") diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 13d7d6fcdb..3ff1367513 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -48,6 +48,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.DefaultHiveMetaHook; @@ -120,6 +121,7 @@ import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -806,8 +808,7 @@ private int renamePartition(Hive db, RenamePartitionDesc renamePartitionDesc) th return 0; } - String names[] = Utilities.getDbTableName(tableName); - if (Utils.isBootstrapDumpInProgress(db, names[0])) { + if (Utils.isBootstrapDumpInProgress(db, HiveTableName.ofNullable(tableName).getDb())) { LOG.error("DDLTask: Rename Partition not allowed as bootstrap dump in progress"); throw new HiveException("Rename Partition: Not allowed as bootstrap dump in progress"); } @@ -1632,10 +1633,10 @@ private int msck(Hive db, MsckDesc msckDesc) { try { msck = new Msck( false, false); msck.init(db.getConf()); - String[] names = Utilities.getDbTableName(msckDesc.getTableName()); - MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), names[0], - names[1], msckDesc.getPartSpecs(), msckDesc.getResFile(), - msckDesc.isRepairPartitions(), msckDesc.isAddPartitions(), msckDesc.isDropPartitions(), -1); + final TableName tableName = HiveTableName.ofNullable(msckDesc.getTableName()); + MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), tableName.getDb(), tableName.getTable(), + msckDesc.getPartSpecs(), msckDesc.getResFile(), msckDesc.isRepairPartitions(), msckDesc.isAddPartitions(), + msckDesc.isDropPartitions(), -1); return msck.repair(msckInfo); } catch (MetaException e) { LOG.error("Unable to create msck instance.", e); @@ -2303,8 +2304,9 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition } if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { - tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName())); - tbl.setTableName(Utilities.getTableName(alterTbl.getNewName())); + final TableName tn = HiveTableName.of(alterTbl.getNewName()); + tbl.setDbName(tn.getDb()); + tbl.setTableName(tn.getTable()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) { StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); String serializationLib = sd.getSerdeInfo().getSerializationLib(); @@ -2761,12 +2763,11 @@ private void checkMmLb(Partition part) throws HiveException { private int dropConstraint(Hive db, AlterTableDesc alterTbl) throws SemanticException, HiveException { try { - 
db.dropConstraint(Utilities.getDatabaseName(alterTbl.getOldName()), - Utilities.getTableName(alterTbl.getOldName()), - alterTbl.getConstraintName()); - } catch (NoSuchObjectException e) { - throw new HiveException(e); - } + final TableName tn = HiveTableName.of(alterTbl.getOldName()); + db.dropConstraint(tn.getDb(), tn.getTable(), alterTbl.getConstraintName()); + } catch (NoSuchObjectException e) { + throw new HiveException(e); + } return 0; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 36bc08f34e..6cc77a0f39 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -97,6 +97,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.StringInternUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -2234,11 +2235,22 @@ public static String formatBinaryString(byte[] array, int start, int length) { * @param dbtable * @return String array with two elements, first is db name, second is table name * @throws SemanticException + * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead */ + @Deprecated public static String[] getDbTableName(String dbtable) throws SemanticException { return getDbTableName(SessionState.get().getCurrentDatabase(), dbtable); } + /** + * Extract db and table name from dbtable string. + * @param defaultDb + * @param dbtable + * @return String array with two elements, first is db name, second is table name + * @throws SemanticException + * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead + */ + @Deprecated public static String[] getDbTableName(String defaultDb, String dbtable) throws SemanticException { if (dbtable == null) { return new String[2]; @@ -2254,36 +2266,6 @@ public static String formatBinaryString(byte[] array, int start, int length) { } } - /** - * Accepts qualified name which is in the form of dbname.tablename and returns dbname from it - * - * @param dbTableName - * @return dbname - * @throws SemanticException input string is not qualified name - */ - public static String getDatabaseName(String dbTableName) throws SemanticException { - String[] split = dbTableName.split("\\."); - if (split.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbTableName); - } - return split[0]; - } - - /** - * Accepts qualified name which is in the form of dbname.tablename and returns tablename from it - * - * @param dbTableName - * @return tablename - * @throws SemanticException input string is not qualified name - */ - public static String getTableName(String dbTableName) throws SemanticException { - String[] split = dbTableName.split("\\."); - if (split.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbTableName); - } - return split[1]; - } - public static void validateColumnNames(List colNames, List checkCols) throws SemanticException { Iterator checkColsIter = checkCols.iterator(); @@ -2304,6 +2286,44 @@ public static void validateColumnNames(List colNames, List check } } + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. 
All parts can be null. + * + * @param dbTableName + * @return a {@link TableName} + * @throws SemanticException + * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} + */ + @Deprecated + public static TableName getNullableTableName(String dbTableName) throws SemanticException { + return getNullableTableName(dbTableName, SessionState.get().getCurrentDatabase()); + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. All parts can be null. + * + * @param dbTableName + * @param defaultDb + * @return a {@link TableName} + * @throws SemanticException + * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} + */ + @Deprecated + public static TableName getNullableTableName(String dbTableName, String defaultDb) throws SemanticException { + if (dbTableName == null) { + return new TableName(null, null, null); + } else { + try { + return TableName + .fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); + } + } + } + /** * Gets the default notification interval to send progress updates to the tracker. Useful for * operators that may not output data for a while. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java index 0add38b213..b4c269068a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.ReplLogger; @@ -349,7 +350,7 @@ private Path locationOnReplicaWarehouse(Table table, AddPartitionDesc.OnePartiti Map> partSpecsExpr = ReplUtils.genPartSpecs(table, Collections.singletonList(partSpec)); if (partSpecsExpr.size() > 0) { - DropPartitionDesc dropPtnDesc = new DropPartitionDesc(table.getFullyQualifiedName(), partSpecsExpr, true, + DropPartitionDesc dropPtnDesc = new DropPartitionDesc(HiveTableName.of(table), partSpecsExpr, true, event.replicationSpec()); dropPtnTask = TaskFactory.get( new DDLWork(new HashSet<>(), new HashSet<>(), dropPtnDesc), context.hiveConf diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index e6779b24a5..9c236a2c93 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -38,11 +38,11 @@ import org.antlr.runtime.TokenRewriteStream; import org.antlr.runtime.tree.Tree; -import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.conf.Constants; 
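
[Reviewer sketch, not part of the diff.] A minimal illustration of how the deprecated Utilities.getNullableTableName bridge above is expected to resolve a possibly qualified, possibly null name into the new reference type. The class name, the literal "sales.orders" and the assumption of an active SessionState are illustrative only; TableName.fromString(String, String, String), the TableName(null, null, null) constructor, the SessionState accessors and the getDb/getTable/getNotEmptyDbTable getters are the ones exercised by this patch.

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.session.SessionState;

public class TableNameResolutionSketch {
  // Resolve a possibly-null, possibly-qualified name against the session defaults,
  // mirroring the deprecated Utilities.getNullableTableName bridge added in this patch.
  static TableName resolve(String dbTableName) {
    if (dbTableName == null) {
      return new TableName(null, null, null);          // all parts may be null, as the new javadoc states
    }
    return TableName.fromString(dbTableName,
        SessionState.get().getCurrentCatalog(),        // default catalog when the name carries none
        SessionState.get().getCurrentDatabase());      // default database when the name carries none
  }

  public static void main(String[] args) {
    // Assumes an active SessionState; "sales.orders" is an illustrative name only.
    TableName tn = resolve("sales.orders");
    System.out.println(tn.getDb() + " / " + tn.getTable()); // replaces Utilities.getDatabaseName/getTableName
    System.out.println(tn.getNotEmptyDbTable());            // db-qualified display form, e.g. sales.orders
  }
}
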
import org.apache.hadoop.hive.conf.HiveConf; @@ -398,15 +398,10 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD if (tokenType == HiveParser.TOK_TABNAME) { // table node Map.Entry dbTablePair = getDbTableNamePair(tableOrColumnNode); - String dbName = dbTablePair.getKey(); - String tableName = dbTablePair.getValue(); - if (dbName != null){ - return dbName + "." + tableName; - } - if (currentDatabase != null) { - return currentDatabase + "." + tableName; - } - return tableName; + return TableName.fromString(dbTablePair.getValue(), + null, + dbTablePair.getKey() == null ? currentDatabase : dbTablePair.getKey()) + .getNotEmptyDbTable(); } else if (tokenType == HiveParser.StringLiteral) { return unescapeSQLString(tableOrColumnNode.getText()); } @@ -414,32 +409,42 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD return unescapeIdentifier(tableOrColumnNode.getText()); } - public static String[] getQualifiedTableName(ASTNode tabNameNode) throws SemanticException { - if (tabNameNode.getType() != HiveParser.TOK_TABNAME || - (tabNameNode.getChildCount() != 1 && tabNameNode.getChildCount() != 2)) { + /** + * Get the name reference of a DB table node. + * @param tabNameNode + * @return a {@link TableName}, not null. The catalog will be missing from this. + * @throws SemanticException + */ + public static TableName getQualifiedTableName(ASTNode tabNameNode) throws SemanticException { + // Ideally this would be removed, once the catalog is accessible in all use cases + return getQualifiedTableName(tabNameNode, null); + } + + /** + * Get the name reference of a DB table node. + * @param tabNameNode + * @param catalogName the catalog of the DB/object + * @return a {@link TableName}, not null. The catalog will be missing from this. 
+ * @throws SemanticException + */ + public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalogName) throws SemanticException { + if (tabNameNode.getType() != HiveParser.TOK_TABNAME || (tabNameNode.getChildCount() != 1 + && tabNameNode.getChildCount() != 2)) { throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tabNameNode)); } if (tabNameNode.getChildCount() == 2) { - String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); - String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); + final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); if (dbName.contains(".") || tableName.contains(".")) { throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode)); } - return new String[] {dbName, tableName}; + return HiveTableName.ofNullable(tableName, dbName); } - String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); if (tableName.contains(".")) { throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode)); } - return Utilities.getDbTableName(tableName); - } - - public static String getDotName(String[] qname) throws SemanticException { - String genericName = StringUtils.join(qname, "."); - if (qname.length != 2) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, genericName); - } - return genericName; + return HiveTableName.ofNullable(tableName); } /** @@ -704,112 +709,104 @@ private static String spliceString(String str, int i, int length, String replace /** * Process the primary keys from the ast node and populate the SQLPrimaryKey list. */ - protected static void processPrimaryKeys(String databaseName, String tableName, - ASTNode child, List primaryKeys) throws SemanticException { + protected static void processPrimaryKeys(TableName tName, ASTNode child, List primaryKeys) + throws SemanticException { List primaryKeyInfos = new ArrayList(); generateConstraintInfos(child, primaryKeyInfos); - constraintInfosToPrimaryKeys(databaseName, tableName, primaryKeyInfos, primaryKeys); + constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); } - protected static void processPrimaryKeys(String databaseName, String tableName, - ASTNode child, List columnNames, List primaryKeys) - throws SemanticException { + protected static void processPrimaryKeys(TableName tName, ASTNode child, List columnNames, + List primaryKeys) throws SemanticException { List primaryKeyInfos = new ArrayList(); generateConstraintInfos(child, columnNames, primaryKeyInfos, null, null); - constraintInfosToPrimaryKeys(databaseName, tableName, primaryKeyInfos, primaryKeys); + constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); } - private static void constraintInfosToPrimaryKeys(String databaseName, String tableName, - List primaryKeyInfos, List primaryKeys) { + private static void constraintInfosToPrimaryKeys(TableName tName, List primaryKeyInfos, + List primaryKeys) { int i = 1; for (ConstraintInfo primaryKeyInfo : primaryKeyInfos) { - primaryKeys.add(new SQLPrimaryKey(databaseName, tableName, primaryKeyInfo.colName, - i++, primaryKeyInfo.constraintName, primaryKeyInfo.enable, - primaryKeyInfo.validate, primaryKeyInfo.rely)); + primaryKeys.add( + new SQLPrimaryKey(tName.getDb(), tName.getTable(), primaryKeyInfo.colName, i++, primaryKeyInfo.constraintName, + primaryKeyInfo.enable, 
primaryKeyInfo.validate, primaryKeyInfo.rely)); } } /** * Process the unique constraints from the ast node and populate the SQLUniqueConstraint list. */ - protected static void processUniqueConstraints(String catName, String databaseName, String tableName, - ASTNode child, List uniqueConstraints) throws SemanticException { + protected static void processUniqueConstraints(TableName tName, ASTNode child, + List uniqueConstraints) throws SemanticException { List uniqueInfos = new ArrayList(); generateConstraintInfos(child, uniqueInfos); - constraintInfosToUniqueConstraints(catName, databaseName, tableName, uniqueInfos, uniqueConstraints); + constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); } - protected static void processUniqueConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List uniqueConstraints) - throws SemanticException { + protected static void processUniqueConstraints(TableName tName, ASTNode child, List columnNames, + List uniqueConstraints) throws SemanticException { List uniqueInfos = new ArrayList(); generateConstraintInfos(child, columnNames, uniqueInfos, null, null); - constraintInfosToUniqueConstraints(catName, databaseName, tableName, uniqueInfos, uniqueConstraints); + constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); } - private static void constraintInfosToUniqueConstraints(String catName, String databaseName, String tableName, - List uniqueInfos, List uniqueConstraints) { + private static void constraintInfosToUniqueConstraints(TableName tName, List uniqueInfos, + List uniqueConstraints) { int i = 1; for (ConstraintInfo uniqueInfo : uniqueInfos) { - uniqueConstraints.add(new SQLUniqueConstraint(catName, databaseName, tableName, uniqueInfo.colName, - i++, uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); + uniqueConstraints.add( + new SQLUniqueConstraint(tName.getCat(), tName.getDb(), tName.getTable(), uniqueInfo.colName, i++, + uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); } } - protected static void processCheckConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, - List checkConstraints, final ASTNode typeChild, - final TokenRewriteStream tokenRewriteStream) + protected static void processCheckConstraints(TableName tName, ASTNode child, List columnNames, + List checkConstraints, final ASTNode typeChild, final TokenRewriteStream tokenRewriteStream) throws SemanticException { List checkInfos = new ArrayList(); generateConstraintInfos(child, columnNames, checkInfos, typeChild, tokenRewriteStream); - constraintInfosToCheckConstraints(catName, databaseName, tableName, checkInfos, checkConstraints); + constraintInfosToCheckConstraints(tName, checkInfos, checkConstraints); } - private static void constraintInfosToCheckConstraints(String catName, String databaseName, String tableName, - List checkInfos, - List checkConstraints) { + private static void constraintInfosToCheckConstraints(TableName tName, List checkInfos, + List checkConstraints) { for (ConstraintInfo checkInfo : checkInfos) { - checkConstraints.add(new SQLCheckConstraint(catName, databaseName, tableName, checkInfo.colName, - checkInfo.defaultValue, checkInfo.constraintName, checkInfo.enable, - checkInfo.validate, checkInfo.rely)); + checkConstraints.add(new SQLCheckConstraint(tName.getCat(), tName.getDb(), tName.getTable(), checkInfo.colName, + checkInfo.defaultValue, checkInfo.constraintName, 
checkInfo.enable, checkInfo.validate, checkInfo.rely)); } } - protected static void processDefaultConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List defaultConstraints, final ASTNode typeChild, - final TokenRewriteStream tokenRewriteStream) - throws SemanticException { + protected static void processDefaultConstraints(TableName tName, ASTNode child, List columnNames, + List defaultConstraints, final ASTNode typeChild, + final TokenRewriteStream tokenRewriteStream) throws SemanticException { List defaultInfos = new ArrayList(); generateConstraintInfos(child, columnNames, defaultInfos, typeChild, tokenRewriteStream); - constraintInfosToDefaultConstraints(catName, databaseName, tableName, defaultInfos, defaultConstraints); + constraintInfosToDefaultConstraints(tName, defaultInfos, defaultConstraints); } - private static void constraintInfosToDefaultConstraints( - String catName, String databaseName, String tableName, - List defaultInfos, List defaultConstraints) { + private static void constraintInfosToDefaultConstraints(TableName tName, List defaultInfos, + List defaultConstraints) { for (ConstraintInfo defaultInfo : defaultInfos) { - defaultConstraints.add(new SQLDefaultConstraint(catName, databaseName, tableName, - defaultInfo.colName, defaultInfo.defaultValue, defaultInfo.constraintName, - defaultInfo.enable, defaultInfo.validate, defaultInfo.rely)); + defaultConstraints.add( + new SQLDefaultConstraint(tName.getCat(), tName.getDb(), tName.getTable(), defaultInfo.colName, + defaultInfo.defaultValue, defaultInfo.constraintName, defaultInfo.enable, defaultInfo.validate, + defaultInfo.rely)); } } - protected static void processNotNullConstraints(String catName, String databaseName, String tableName, - ASTNode child, List columnNames, List notNullConstraints) - throws SemanticException { + protected static void processNotNullConstraints(TableName tName, ASTNode child, List columnNames, + List notNullConstraints) throws SemanticException { List notNullInfos = new ArrayList(); generateConstraintInfos(child, columnNames, notNullInfos, null, null); - constraintInfosToNotNullConstraints(catName, databaseName, tableName, notNullInfos, notNullConstraints); + constraintInfosToNotNullConstraints(tName, notNullInfos, notNullConstraints); } - private static void constraintInfosToNotNullConstraints( - String catName, String databaseName, String tableName, List notNullInfos, + private static void constraintInfosToNotNullConstraints(TableName tName, List notNullInfos, List notNullConstraints) { for (ConstraintInfo notNullInfo : notNullInfos) { - notNullConstraints.add(new SQLNotNullConstraint(catName, databaseName, tableName, - notNullInfo.colName, notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, - notNullInfo.rely)); + notNullConstraints.add( + new SQLNotNullConstraint(tName.getCat(), tName.getDb(), tName.getTable(), notNullInfo.colName, + notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, notNullInfo.rely)); } } @@ -1094,12 +1091,13 @@ else if(child.getToken().getType() == HiveParser.TOK_CHECK_CONSTRAINT) { /** * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list + * @param tName catalog/db/table name reference * @param child Foreign Key token node * @param foreignKeys SQLForeignKey list * @throws SemanticException */ - protected static void processForeignKeys(String databaseName, String tableName, - ASTNode child, List foreignKeys) throws SemanticException { + 
protected static void processForeignKeys(TableName tName, ASTNode child, List foreignKeys) + throws SemanticException { // The ANTLR grammar looks like : // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList // KW_REFERENCES tabName=tableName parCols=columnParenthesesList @@ -1157,16 +1155,16 @@ protected static void processForeignKeys(String databaseName, String tableName, " The number of foreign key columns should be same as number of parent key columns ")); } - String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex)); + final TableName parentTblName = getQualifiedTableName((ASTNode) child.getChild(ptIndex)); for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) { SQLForeignKey sqlForeignKey = new SQLForeignKey(); - sqlForeignKey.setFktable_db(databaseName); - sqlForeignKey.setFktable_name(tableName); + sqlForeignKey.setFktable_db(tName.getDb()); + sqlForeignKey.setFktable_name(tName.getTable()); Tree fkgrandChild = child.getChild(fkIndex).getChild(j); checkColumnName(fkgrandChild.getText()); sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase())); - sqlForeignKey.setPktable_db(parentDBTbl[0]); - sqlForeignKey.setPktable_name(parentDBTbl[1]); + sqlForeignKey.setPktable_db(parentTblName.getDb()); + sqlForeignKey.setPktable_name(parentTblName.getTable()); Tree pkgrandChild = child.getChild(pkIndex).getChild(j); sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase())); sqlForeignKey.setKey_seq(j+1); @@ -1221,34 +1219,33 @@ private static void checkColumnName(String columnName) throws SemanticException ASTNode child = (ASTNode) ast.getChild(i); switch (child.getToken().getType()) { case HiveParser.TOK_UNIQUE: { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child, - uniqueConstraints); + processUniqueConstraints(tName, child, uniqueConstraints); } break; case HiveParser.TOK_PRIMARY_KEY: { if (!primaryKeys.isEmpty()) { - throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg( - "Cannot exist more than one primary key definition for the same table")); + throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT + .getMsg("Cannot exist more than one primary key definition for the same table")); } - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); + processPrimaryKeys(tName, child, primaryKeys); } break; case HiveParser.TOK_FOREIGN_KEY: { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); + processForeignKeys(tName, child, foreignKeys); } break; case HiveParser.TOK_CHECK_CONSTRAINT: { + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child, null, + processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream); } break; @@ -1279,39 +1276,35 @@ private static void checkColumnName(String columnName) throws SemanticException constraintChild = (ASTNode) child.getChild(2); } if (constraintChild != null) { - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); // Process column constraint switch (constraintChild.getToken().getType()) { case HiveParser.TOK_CHECK_CONSTRAINT: - processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), checkConstraints, typeChild, - tokenRewriteStream); + processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), checkConstraints, + typeChild, tokenRewriteStream); break; case HiveParser.TOK_DEFAULT_VALUE: - processDefaultConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream); + processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), defaultConstraints, + typeChild, tokenRewriteStream); break; case HiveParser.TOK_NOT_NULL: - processNotNullConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), notNullConstraints); + processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), notNullConstraints); break; case HiveParser.TOK_UNIQUE: - processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), uniqueConstraints); + processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: if (!primaryKeys.isEmpty()) { - throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg( - "Cannot exist more than one primary key definition for the same table")); + throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT + .getMsg("Cannot exist more than one primary key definition for the same table")); } - processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, - ImmutableList.of(col.getName()), primaryKeys); + processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: - processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild, + processForeignKeys(tName, constraintChild, foreignKeys); break; default: @@ -1426,7 +1419,7 @@ private static String getUnionTypeStringFromAST(ASTNode typeNode) * */ public static class TableSpec { - public String tableName; + private TableName tableName; public Table tableHandle; public Map partSpec; // has to use LinkedHashMap to enforce order public Partition partHandle; @@ -1442,7 +1435,7 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast) public TableSpec(Table table) { tableHandle = table; - tableName 
= table.getDbName() + "." + table.getTableName(); + tableName = TableName.fromString(table.getTableName(), SessionState.get().getCurrentCatalog(), table.getDbName()); specType = SpecType.TABLE_ONLY; } @@ -1454,7 +1447,8 @@ public TableSpec(Hive db, String tableName, Map partSpec, boolea throws HiveException { Table table = db.getTable(tableName); tableHandle = table; - this.tableName = table.getDbName() + "." + table.getTableName(); + this.tableName = TableName.fromString(table.getTableName(), SessionState.get().getCurrentCatalog(), + table.getDbName()); if (partSpec == null) { specType = SpecType.TABLE_ONLY; } else if(allowPartialPartitionsSpec) { @@ -1474,7 +1468,8 @@ public TableSpec(Hive db, String tableName, Map partSpec, boolea public TableSpec(Table tableHandle, List partitions) throws HiveException { this.tableHandle = tableHandle; - this.tableName = tableHandle.getTableName(); + this.tableName = + TableName.fromString(tableHandle.getTableName(), tableHandle.getCatalogName(), tableHandle.getDbName()); if (partitions != null && !partitions.isEmpty()) { this.specType = SpecType.STATIC_PARTITION; this.partitions = partitions; @@ -1513,16 +1508,16 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit try { // get table metadata - tableName = getUnescapedName((ASTNode)ast.getChild(0)); + tableName = HiveTableName.withNoDefault(getUnescapedName((ASTNode)ast.getChild(0))); boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE); if (testMode) { - tableName = conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX) - + tableName; + tableName = TableName.fromString(String.join("", conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX), + tableName.getTable()), tableName.getCat(), tableName.getDb()); // not that elegant, but hard to refactor } if (ast.getToken().getType() != HiveParser.TOK_CREATETABLE && ast.getToken().getType() != HiveParser.TOK_CREATE_MATERIALIZED_VIEW && ast.getToken().getType() != HiveParser.TOK_ALTER_MATERIALIZED_VIEW) { - tableHandle = db.getTable(tableName); + tableHandle = db.getTable(tableName.getTable()); } } catch (InvalidTableException ite) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast @@ -1638,6 +1633,14 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit } } + public TableName getTableName() { + return tableName; + } + + public void setTableName(TableName tableName) { + this.tableName = tableName; + } + public Map getPartSpec() { return this.partSpec; } @@ -2172,12 +2175,12 @@ protected Database getDatabase(String dbName, boolean throwException) throws Sem return database; } - protected Table getTable(String[] qualified) throws SemanticException { - return getTable(qualified[0], qualified[1], true); + protected Table getTable(TableName tn) throws SemanticException { + return getTable(tn, true); } - protected Table getTable(String[] qualified, boolean throwException) throws SemanticException { - return getTable(qualified[0], qualified[1], throwException); + protected Table getTable(TableName tn, boolean throwException) throws SemanticException { + return getTable(tn.getDb(), tn.getTable(), throwException); } protected Table getTable(String tblName) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 20501cf75e..dc26109c80 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -2226,8 +2226,8 @@ private RelNode applyMaterializedViewRewriting(RelOptPlanner planner, RelNode ba // We only retrieve the materialization corresponding to the rebuild. In turn, // we pass 'true' for the forceMVContentsUpToDate parameter, as we cannot allow the // materialization contents to be stale for a rebuild if we want to use it. - materializations = db.getValidMaterializedView(mvRebuildDbName, mvRebuildName, - getTablesUsed(basePlan), true, getTxnMgr()); + materializations = + db.getValidMaterializedView(mvRebuildDbName, mvRebuildName, getTablesUsed(basePlan), true, getTxnMgr()); } else { // This is not a rebuild, we retrieve all the materializations. In turn, we do not need // to force the materialization contents to be up-to-date, as this is not a rebuild, and diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index d2c3f7b1a6..cc3d1eece5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; @@ -284,11 +285,10 @@ public void analyzeInternal(ASTNode input) throws SemanticException { switch (ast.getType()) { case HiveParser.TOK_ALTERTABLE: { ast = (ASTNode) input.getChild(1); - String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); + final TableName tName = + getQualifiedTableName((ASTNode) input.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); - String tableName = getDotName(qualified); HashMap partSpec = null; ASTNode partSpecNode = (ASTNode)input.getChild(2); if (partSpecNode != null) { @@ -298,70 +298,70 @@ public void analyzeInternal(ASTNode input) throws SemanticException { if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { partSpec = getPartSpec(partSpecNode); } else { - partSpec = getValidatedPartSpec(getTable(tableName), partSpecNode, conf, false); + partSpec = getValidatedPartSpec(getTable(tName.getTable()), partSpecNode, conf, false); } } if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) { - analyzeAlterTableRename(qualified, ast, false); + analyzeAlterTableRename(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { - analyzeAlterTableTouch(qualified, ast); + analyzeAlterTableTouch(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { - analyzeAlterTableArchive(qualified, ast, false); + analyzeAlterTableArchive(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { - analyzeAlterTableArchive(qualified, ast, true); + analyzeAlterTableArchive(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.ADDCOLS); + analyzeAlterTableModifyCols(tName, ast, partSpec, AlterTableTypes.ADDCOLS); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.REPLACECOLS); + analyzeAlterTableModifyCols(tName, ast, partSpec, AlterTableTypes.REPLACECOLS); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) { - analyzeAlterTableRenameCol(catName, qualified, ast, partSpec); + analyzeAlterTableRenameCol(tName, ast, partSpec); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { - analyzeAlterTableAddParts(qualified, ast, false); + analyzeAlterTableAddParts(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) { - analyzeAlterTableDropParts(qualified, ast, false); + analyzeAlterTableDropParts(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) { - analyzeAlterTablePartColType(qualified, ast); + analyzeAlterTablePartColType(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, false, false); + analyzeAlterTableProps(tName, null, ast, false, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, false, true); + analyzeAlterTableProps(tName, null, ast, false, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UPDATESTATS) { - analyzeAlterTableProps(qualified, partSpec, ast, false, false); + analyzeAlterTableProps(tName, partSpec, ast, false, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_SKEWED) { - analyzeAltertableSkewedby(qualified, ast); + analyzeAltertableSkewedby(tName, ast); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) { - analyzeExchangePartition(qualified, ast); + analyzeExchangePartition(tName, ast); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { - analyzeAlterTableFileFormat(ast, tableName, partSpec); + analyzeAlterTableFileFormat(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == 
HiveParser.TOK_ALTERTABLE_LOCATION) { - analyzeAlterTableLocation(ast, tableName, partSpec); + analyzeAlterTableLocation(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) { - analyzeAlterTablePartMergeFiles(ast, tableName, partSpec); + analyzeAlterTablePartMergeFiles(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) { - analyzeAlterTableSerde(ast, tableName, partSpec); + analyzeAlterTableSerde(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) { - analyzeAlterTableSerdeProps(ast, tableName, partSpec); + analyzeAlterTableSerdeProps(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { - analyzeAlterTableRenamePart(ast, tableName, partSpec); + analyzeAlterTableRenamePart(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) { - analyzeAlterTableSkewedLocation(ast, tableName, partSpec); + analyzeAlterTableSkewedLocation(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_BUCKETS) { - analyzeAlterTableBucketNum(ast, tableName, partSpec); + analyzeAlterTableBucketNum(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { - analyzeAlterTableClusterSort(ast, tableName, partSpec); + analyzeAlterTableClusterSort(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_COMPACT) { - analyzeAlterTableCompact(ast, tableName, partSpec); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS){ - analyzeAlterTableUpdateStats(ast, tableName, partSpec); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) { - analyzeAlterTableDropConstraint(ast, tableName); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) { - analyzeAlterTableAddConstraint(ast, tableName); - } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) { - analyzeAlterTableUpdateColumns(ast, tableName, partSpec); + analyzeAlterTableCompact(ast, tName.getTable(), partSpec); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) { + analyzeAlterTableUpdateStats(ast, tName.getTable(), partSpec); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) { + analyzeAlterTableDropConstraint(ast, tName.getTable()); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) { + analyzeAlterTableAddConstraint(ast, tName.getTable()); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) { + analyzeAlterTableUpdateColumns(ast, tName.getTable(), partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_OWNER) { - analyzeAlterTableOwner(ast, tableName); + analyzeAlterTableOwner(ast, tName.getTable()); } break; } @@ -452,28 +452,27 @@ public void analyzeInternal(ASTNode input) throws SemanticException { analyzeDropTable(ast, TableType.MATERIALIZED_VIEW); break; case HiveParser.TOK_ALTERVIEW: { - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + final TableName tName = getQualifiedTableName((ASTNode) ast.getChild(0)); ast = (ASTNode) ast.getChild(1); if (ast.getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, true, 
false); + analyzeAlterTableProps(tName, null, ast, true, false); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPROPERTIES) { - analyzeAlterTableProps(qualified, null, ast, true, true); + analyzeAlterTableProps(tName, null, ast, true, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_ADDPARTS) { - analyzeAlterTableAddParts(qualified, ast, true); + analyzeAlterTableAddParts(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPARTS) { - analyzeAlterTableDropParts(qualified, ast, true); + analyzeAlterTableDropParts(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_RENAME) { - analyzeAlterTableRename(qualified, ast, true); + analyzeAlterTableRename(tName, ast, true); } break; } case HiveParser.TOK_ALTER_MATERIALIZED_VIEW: { ast = (ASTNode) input.getChild(1); - String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); - String tableName = getDotName(qualified); + final TableName tName = getQualifiedTableName((ASTNode) input.getChild(0)); if (ast.getType() == HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE) { - analyzeAlterMaterializedViewRewrite(tableName, ast); + analyzeAlterMaterializedViewRewrite(tName.getDbTable(), ast); } break; } @@ -817,8 +816,8 @@ private void analyzeAlterDatabaseLocation(ASTNode ast) throws SemanticException addAlterDbDesc(alterDesc); } - private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException { - Table destTable = getTable(qualified); + private void analyzeExchangePartition(TableName tName, ASTNode ast) throws SemanticException { + Table destTable = getTable(tName); Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); // Get the partition specs @@ -1700,10 +1699,9 @@ private boolean hasConstraintsEnabled(final String tblName) throws SemanticExcep return false; } - private void analyzeAlterTableProps(String[] qualified, HashMap partSpec, - ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException { + private void analyzeAlterTableProps(TableName tableName, Map partSpec, ASTNode ast, + boolean expectView, boolean isUnset) throws SemanticException { - String tableName = getDotName(qualified); HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) .getChild(0)); EnvironmentContext environmentContext = null; @@ -1727,10 +1725,10 @@ private void analyzeAlterTableProps(String[] qualified, HashMap } // if table is being modified to be external we need to make sure existing table // doesn't have enabled constraint since constraints are disallowed with such tables - else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ - if(hasConstraintsEnabled(qualified[1])){ + else if (entry.getKey().equals("external") && entry.getValue().equals("true")) { + if (hasConstraintsEnabled(tableName.getTable())) { throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName + " has constraints enabled." + ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName.getDbTable() + " has constraints enabled." 
+ "Please remove those constraints to change this property.")); } } @@ -1751,7 +1749,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ } } AlterTableDesc alterTblDesc = null; - if (isUnset == true) { + if (isUnset) { alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, partSpec, expectView); if (ast.getChild(1) != null) { alterTblDesc.setDropIfExists(true); @@ -1762,18 +1760,16 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ } alterTblDesc.setProps(mapProp); alterTblDesc.setEnvironmentContext(environmentContext); - alterTblDesc.setOldName(tableName); - - + alterTblDesc.setOldName(tableName.getDbTable()); - boolean isToTxn = AcidUtils.isTablePropertyTransactional(mapProp) - || mapProp.containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, isToTxn); + boolean isToTxn = AcidUtils.isTablePropertyTransactional(mapProp) || mapProp + .containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); + addInputsOutputsAlterTable(tableName.getDbTable(), partSpec, alterTblDesc, isToTxn); // This special handling is because we cannot generate write ID for full ACID conversion, // it will break the weird 10000001-write-ID logic that is currently in use. However, we do // want to generate a write ID for prop changes for existing txn tables, or MM conversion. - boolean isAcidConversion = isToTxn && AcidUtils.isFullAcidTable(mapProp) - && !AcidUtils.isFullAcidTable(getTable(qualified, true)); + boolean isAcidConversion = + isToTxn && AcidUtils.isFullAcidTable(mapProp) && !AcidUtils.isFullAcidTable(getTable(tableName, true)); DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), alterTblDesc); if (isToTxn) { @@ -1782,7 +1778,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ ddlWork.setNeedLock(true); // Hmm... why don't many other operations here need locks? } if (changeStatsSucceeded) { - Table table = getTable(qualified, true); + Table table = getTable(tableName, true); if (AcidUtils.isTransactionalTable(table)) { alterTblDesc.setIsExplicitStatsUpdate(true); setAcidDdlDesc(alterTblDesc); @@ -2236,10 +2232,9 @@ private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName) private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException { ASTNode parent = (ASTNode) ast.getParent(); - String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0)); + final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - String catName = MetaStoreUtils.getDefaultCatalog(conf); ASTNode child = (ASTNode) ast.getChild(0); List primaryKeys = new ArrayList<>(); List foreignKeys = new ArrayList<>(); @@ -2248,21 +2243,17 @@ private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) switch (child.getToken().getType()) { case HiveParser.TOK_UNIQUE: - BaseSemanticAnalyzer.processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], - child, uniqueConstraints); + BaseSemanticAnalyzer.processUniqueConstraints(tName, child, uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: - BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], - child, primaryKeys); + BaseSemanticAnalyzer.processPrimaryKeys(tName, child, primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: - BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], - child, foreignKeys); + BaseSemanticAnalyzer.processForeignKeys(tName, child, foreignKeys); break; case HiveParser.TOK_CHECK_CONSTRAINT: - BaseSemanticAnalyzer.processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], - child, null, checkConstraints, child, - this.ctx.getTokenRewriteStream()); + BaseSemanticAnalyzer + .processCheckConstraints(tName, child, null, checkConstraints, child, this.ctx.getTokenRewriteStream()); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( @@ -2338,16 +2329,12 @@ static public String getFullyQualifiedName(ASTNode ast) { // return column name if exists, column could be DOT separated. // example: lintString.$elem$.myint // return table name for column name if no column has been specified. - static public String getColPath( - Hive db, - ASTNode node, - String dbName, - String tableName, - Map partSpec) throws SemanticException { + static String getColPath(Hive db, ASTNode node, TableName tableName, Map partSpec) + throws SemanticException { // if this ast has only one child, then no column name specified. if (node.getChildCount() == 1) { - return tableName; + return tableName.getTable(); } ASTNode columnNode = null; @@ -2361,19 +2348,14 @@ static public String getColPath( } if (columnNode != null) { - if (dbName == null) { - return tableName + "." + QualifiedNameUtil.getFullyQualifiedName(columnNode); - } else { - return tableName.substring(dbName.length() + 1, tableName.length()) + "." + - QualifiedNameUtil.getFullyQualifiedName(columnNode); - } + return String.join(".", tableName.getTable(), QualifiedNameUtil.getFullyQualifiedName(columnNode)); } else { - return tableName; + return tableName.getNotEmptyDbTable(); } } // get partition metadata - static public Map getPartitionSpec(Hive db, ASTNode ast, String tableName) + static Map getPartitionSpec(Hive db, ASTNode ast, TableName tableName) throws SemanticException { ASTNode partNode = null; // if this ast has only one child, then no partition spec specified. 
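
[Reviewer sketch, not part of the diff.] The constraint helpers touched above (analyzeAlterTableAddConstraint, processUniqueConstraints and friends) now unpack one TableName instead of threading separate catName/databaseName/tableName strings. A small sketch of that shape, assuming the Hive metastore API on the classpath; the column name, sequence number, constraint name and boolean flags are illustrative placeholders, while the SQLUniqueConstraint argument order and the TableName getters follow the calls in this patch.

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

public class ConstraintFromTableNameSketch {
  // Build a unique constraint from a single name reference, as the patched builders do.
  static SQLUniqueConstraint uniqueConstraint(TableName tn, String columnName, int keySeq, String constraintName) {
    return new SQLUniqueConstraint(tn.getCat(), tn.getDb(), tn.getTable(), columnName, keySeq,
        constraintName, true /*enable*/, false /*validate*/, false /*rely*/);
  }

  public static void main(String[] args) {
    TableName tn = TableName.fromString("sales.orders", "hive", "default"); // illustrative values
    System.out.println(uniqueConstraint(tn, "order_id", 1, "uk_orders_id"));
  }
}
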
@@ -2395,10 +2377,10 @@ static public String getColPath( if (partNode != null) { Table tab = null; try { - tab = db.getTable(tableName); + tab = db.getTable(tableName.getNotEmptyDbTable()); } catch (InvalidTableException e) { - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e); + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName.getNotEmptyDbTable()), e); } catch (HiveException e) { throw new SemanticException(e.getMessage(), e); @@ -2455,9 +2437,9 @@ private void validateDatabase(String databaseName) throws SemanticException { } } - private void validateTable(String tableName, Map partSpec) + private void validateTable(TableName tableName, Map partSpec) throws SemanticException { - Table tab = getTable(tableName); + Table tab = getTable(tableName.getNotEmptyDbTable()); if (partSpec != null) { getPartition(tab, partSpec, true); } @@ -2480,8 +2462,7 @@ private void validateTable(String tableName, Map partSpec) private void analyzeDescribeTable(ASTNode ast) throws SemanticException { ASTNode tableTypeExpr = (ASTNode) ast.getChild(0); - String dbName = null; - String tableName = null; + final TableName tableName; String colPath = null; Map partSpec = null; @@ -2492,10 +2473,10 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { if (((ASTNode) tableTypeExpr.getChild(0)).getType() == HiveParser.TOK_TABNAME) { tableNode = (ASTNode) tableTypeExpr.getChild(0); if (tableNode.getChildCount() == 1) { - tableName = ((ASTNode) tableNode.getChild(0)).getText(); + tableName = HiveTableName.withNoDefault(((ASTNode) tableNode.getChild(0)).getText()); } else { - dbName = ((ASTNode) tableNode.getChild(0)).getText(); - tableName = dbName + "." + ((ASTNode) tableNode.getChild(1)).getText(); + tableName = TableName.fromString(((ASTNode) tableNode.getChild(1)).getText(), + SessionState.get().getCurrentCatalog(), ((ASTNode) tableNode.getChild(0)).getText()); } } else { throw new SemanticException(((ASTNode) tableTypeExpr.getChild(0)).getText() + " is not an expected token type"); @@ -2505,12 +2486,12 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { partSpec = QualifiedNameUtil.getPartitionSpec(db, tableTypeExpr, tableName); // process the third child node,if exists, to get partition spec(s) - colPath = QualifiedNameUtil.getColPath(db, tableTypeExpr, dbName, tableName, partSpec); + colPath = QualifiedNameUtil.getColPath(db, tableTypeExpr, tableName, partSpec); // if database is not the one currently using // validate database - if (dbName != null) { - validateDatabase(dbName); + if (tableName.getDb() != null) { + validateDatabase(tableName.getDb()); } if (partSpec != null) { validateTable(tableName, partSpec); @@ -2527,14 +2508,15 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { // will contain tablename.column_name. If column_name is not specified // colPath will be equal to tableName. 
This is how we can differentiate // if we are describing a table or column - if (!colPath.equalsIgnoreCase(tableName) && isFormatted) { + if (!colPath.equalsIgnoreCase(tableName.getTable()) && isFormatted) { showColStats = true; } } inputs.add(new ReadEntity(getTable(tableName))); - DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec, colPath, isExt, isFormatted); + DescTableDesc descTblDesc = + new DescTableDesc(ctx.getResFile(), tableName.getTable(), partSpec, colPath, isExt, isFormatted); Task ddlTask = TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descTblDesc)); rootTasks.add(ddlTask); String schema = DescTableDesc.getSchema(showColStats); @@ -2607,7 +2589,7 @@ private void analyzeShowPartitions(ASTNode ast) throws SemanticException { partSpec = partSpecs.get(0); } - validateTable(tableName, null); + validateTable(HiveTableName.ofNullableWithNoDefault(tableName), null); showPartsDesc = new ShowPartitionsDesc(tableName, ctx.getResFile(), partSpec); inputs.add(new ReadEntity(getTable(tableName))); @@ -2754,7 +2736,7 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { } if (partSpec != null) { - validateTable(tableNames, partSpec); + validateTable(HiveTableName.ofNullableWithNoDefault(tableNames), partSpec); } showTblStatusDesc = new ShowTableStatusDesc(ctx.getResFile().toString(), dbName, tableNames, partSpec); @@ -2764,16 +2746,15 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { private void analyzeShowTableProperties(ASTNode ast) throws SemanticException { ShowTablePropertiesDesc showTblPropertiesDesc; - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + TableName qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); String propertyName = null; if (ast.getChildCount() > 1) { propertyName = unescapeSQLString(ast.getChild(1).getText()); } - String tableNames = getDotName(qualified); - validateTable(tableNames, null); + validateTable(qualified, null); - showTblPropertiesDesc = new ShowTablePropertiesDesc(ctx.getResFile().toString(), tableNames, propertyName); + showTblPropertiesDesc = new ShowTablePropertiesDesc(ctx.getResFile().toString(), qualified, propertyName); rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showTblPropertiesDesc))); setFetchTask(createFetchTask(ShowTablePropertiesDesc.SCHEMA)); } @@ -3157,25 +3138,22 @@ private void analyzeDescFunction(ASTNode ast) throws SemanticException { } - private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView) + private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expectView) throws SemanticException { - String[] target = getQualifiedTableName((ASTNode) ast.getChild(0)); - - String sourceName = getDotName(source); - String targetName = getDotName(target); + final TableName target = getQualifiedTableName((ASTNode) ast.getChild(0)); - AlterTableDesc alterTblDesc = new AlterTableDesc(sourceName, targetName, expectView, null); - Table table = getTable(sourceName, true); + AlterTableDesc alterTblDesc = new AlterTableDesc(source.getDbTable(), target.getDbTable(), expectView, null); + Table table = getTable(source.getDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); } - addInputsOutputsAlterTable(sourceName, null, alterTblDesc); + addInputsOutputsAlterTable(source.getDbTable(), null, alterTblDesc); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } - private void 
analyzeAlterTableRenameCol(String catName, String[] qualified, ASTNode ast, - HashMap partSpec) throws SemanticException { + private void analyzeAlterTableRenameCol(TableName tName, ASTNode ast, Map partSpec) + throws SemanticException { String newComment = null; boolean first = false; String flagCol = null; @@ -3218,35 +3196,29 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN switch (constraintChild.getToken().getType()) { case HiveParser.TOK_CHECK_CONSTRAINT: checkConstraints = new ArrayList<>(); - processCheckConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), checkConstraints, (ASTNode)ast.getChild(2), - this.ctx.getTokenRewriteStream()); + processCheckConstraints(tName, constraintChild, ImmutableList.of(newColName), checkConstraints, + (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_DEFAULT_VALUE: defaultConstraints = new ArrayList<>(); - processDefaultConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), defaultConstraints, (ASTNode)ast.getChild(2), - this.ctx.getTokenRewriteStream()); + processDefaultConstraints(tName, constraintChild, ImmutableList.of(newColName), defaultConstraints, + (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_NOT_NULL: notNullConstraints = new ArrayList<>(); - processNotNullConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), notNullConstraints); + processNotNullConstraints(tName, constraintChild, ImmutableList.of(newColName), notNullConstraints); break; case HiveParser.TOK_UNIQUE: uniqueConstraints = new ArrayList<>(); - processUniqueConstraints(catName, qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), uniqueConstraints); + processUniqueConstraints(tName, constraintChild, ImmutableList.of(newColName), uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: primaryKeys = new ArrayList<>(); - processPrimaryKeys(qualified[0], qualified[1], constraintChild, - ImmutableList.of(newColName), primaryKeys); + processPrimaryKeys(tName, constraintChild, ImmutableList.of(newColName), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: foreignKeys = new ArrayList<>(); - processForeignKeys(qualified[0], qualified[1], constraintChild, - foreignKeys); + processForeignKeys(tName, constraintChild, foreignKeys); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( @@ -3255,7 +3227,7 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN } /* Validate the operation of renaming a column name. 
*/ - Table tab = getTable(qualified); + Table tab = getTable(tName); if(checkConstraints != null && !checkConstraints.isEmpty()) { validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf()); @@ -3276,21 +3248,18 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg()); } - String tblName = getDotName(qualified); AlterTableDesc alterTblDesc; if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade); + alterTblDesc = new AlterTableDesc(tName.getDbTable(), partSpec, unescapeIdentifier(oldColName), + unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade); } else { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade, - primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints); + alterTblDesc = new AlterTableDesc(tName.getDbTable(), partSpec, unescapeIdentifier(oldColName), + unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade, primaryKeys, foreignKeys, + uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints); } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); + addInputsOutputsAlterTable(tName.getDbTable(), partSpec, alterTblDesc); if (AcidUtils.isTransactionalTable(tab)) { // Note: we might actually need it only when certain changes (e.g. name or type?) are made. 
setAcidDdlDesc(alterTblDesc); @@ -3343,30 +3312,27 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, alterBucketNum))); } - private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, - HashMap partSpec, AlterTableTypes alterType) throws SemanticException { + private void analyzeAlterTableModifyCols(TableName tName, ASTNode ast, Map partSpec, + AlterTableTypes alterType) throws SemanticException { - String tblName = getDotName(qualified); List newCols = getColumns((ASTNode) ast.getChild(0)); boolean isCascade = false; if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) { isCascade = true; } - AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols, - alterType, isCascade); - Table table = getTable(tblName, true); + AlterTableDesc alterTblDesc = + new AlterTableDesc(tName.getNotEmptyDbTable(), partSpec, newCols, alterType, isCascade); + Table table = getTable(tName.getDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + addInputsOutputsAlterTable(tName.getDbTable(), partSpec, alterTblDesc); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } - private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) - throws SemanticException { + private void analyzeAlterTableDropParts(TableName tName, ASTNode ast, boolean expectView) throws SemanticException { boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null) || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT); @@ -3383,7 +3349,7 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean Table tab = null; try { - tab = getTable(qualified); + tab = getTable(tName); } catch (SemanticException se){ if (replicationSpec.isInReplicationScope() && ( @@ -3417,16 +3383,16 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists); DropPartitionDesc dropTblDesc = - new DropPartitionDesc(getDotName(qualified), partSpecs, mustPurge, replicationSpec); + new DropPartitionDesc(tName, partSpecs, mustPurge, replicationSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc))); } - private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) + private void analyzeAlterTablePartColType(TableName tableName, ASTNode ast) throws SemanticException { // check if table exists. - Table tab = getTable(qualified); + Table tab = getTable(tableName); inputs.add(new ReadEntity(tab)); // validate the DDL is a valid operation on the table. @@ -3463,8 +3429,7 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(newCol.getName())); } - AlterTableAlterPartDesc alterTblAlterPartDesc = - new AlterTableAlterPartDesc(getDotName(qualified), newCol); + AlterTableAlterPartDesc alterTblAlterPartDesc = new AlterTableAlterPartDesc(tableName.getDbTable(), newCol); if (AcidUtils.isTransactionalTable(tab)) { setAcidDdlDesc(alterTblAlterPartDesc); } @@ -3473,7 +3438,7 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) alterTblAlterPartDesc))); } - /** + /** * Add one or more partitions to a table. 
Useful when the data has been copied * to the right location by some other process. * @@ -3486,13 +3451,12 @@ private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) * @throws SemanticException * Parsing failed */ - private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView) - throws SemanticException { + private void analyzeAlterTableAddParts(TableName tName, CommonTree ast, boolean expectView) throws SemanticException { // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+) boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS; - Table tab = getTable(qualified); + Table tab = getTable(tName); boolean isView = tab.isView(); validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView); outputs.add(new WriteEntity(tab, @@ -3567,9 +3531,9 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole // Compile internal query to capture underlying table partition dependencies StringBuilder cmd = new StringBuilder(); cmd.append("SELECT * FROM "); - cmd.append(HiveUtils.unparseIdentifier(qualified[0])); + cmd.append(HiveUtils.unparseIdentifier(tName.getDb())); cmd.append("."); - cmd.append(HiveUtils.unparseIdentifier(qualified[1])); + cmd.append(HiveUtils.unparseIdentifier(tName.getTable())); cmd.append(" WHERE "); boolean firstOr = true; for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) { @@ -3672,10 +3636,9 @@ private void handleTransactionalTable(Table tab, AddPartitionDesc addPartitionDe * @throws SemanticException * Parsing failed */ - private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) - throws SemanticException { + private void analyzeAlterTableTouch(TableName tName, CommonTree ast) throws SemanticException { - Table tab = getTable(qualified); + Table tab = getTable(tName); validateAlterTableType(tab, AlterTableTypes.TOUCH); inputs.add(new ReadEntity(tab)); @@ -3684,7 +3647,7 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) if (partSpecs.size() == 0) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - getDotName(qualified), null, + tName.getDbTable(), null, AlterTableDesc.AlterTableTypes.TOUCH); outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_NO_LOCK)); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), @@ -3692,23 +3655,21 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) } else { addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK); for (Map partSpec : partSpecs) { - AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - getDotName(qualified), partSpec, - AlterTableDesc.AlterTableTypes.TOUCH); + AlterTableSimpleDesc touchDesc = + new AlterTableSimpleDesc(tName.getDbTable(), partSpec, AlterTableDesc.AlterTableTypes.TOUCH); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc))); } } } - private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive) - throws SemanticException { + private void analyzeAlterTableArchive(TableName tName, CommonTree ast, boolean isUnArchive) throws SemanticException { if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) { throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); } - Table tab = getTable(qualified); + Table tab = getTable(tName); // partition name to value List> partSpecs = getPartitionSpecs(tab, ast); @@ -3731,8 +3692,7 @@ private void analyzeAlterTableArchive(String[] 
qualified, CommonTree ast, boolea } catch (HiveException e) { throw new SemanticException(e.getMessage(), e); } - AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc( - getDotName(qualified), partSpec, + AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(tName.getDbTable(), partSpec, (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE)); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc))); @@ -4105,35 +4065,34 @@ private void addTableDropPartsOutputs(Table tab, * node * @throws SemanticException */ - private void analyzeAltertableSkewedby(String[] qualified, ASTNode ast) throws SemanticException { + private void analyzeAltertableSkewedby(TableName tName, ASTNode ast) throws SemanticException { /** * Throw an error if the user tries to use the DDL with * hive.internal.ddl.list.bucketing.enable set to false. */ SessionState.get().getConf(); - Table tab = getTable(qualified); + Table tab = getTable(tName); inputs.add(new ReadEntity(tab)); outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE)); validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY); - String tableName = getDotName(qualified); if (ast.getChildCount() == 0) { /* Convert a skewed table to non-skewed table. */ - AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true, - new ArrayList(), new ArrayList>()); + AlterTableDesc alterTblDesc = + new AlterTableDesc(tName.getDbTable(), true, new ArrayList<>(), new ArrayList<>()); alterTblDesc.setStoredAsSubDirectories(false); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } else { switch (((ASTNode) ast.getChild(0)).getToken().getType()) { case HiveParser.TOK_TABLESKEWED: - handleAlterTableSkewedBy(ast, tableName, tab); + handleAlterTableSkewedBy(ast, tName.getDbTable(), tab); break; case HiveParser.TOK_STOREDASDIRS: - handleAlterTableDisableStoredAsDirs(tableName, tab); + handleAlterTableDisableStoredAsDirs(tName.getDbTable(), tab); break; default: assert false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java index 4a366a9360..83c0d2bf43 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java @@ -115,8 +115,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { MmContext mmCtx = MmContext.createIfNeeded(ts == null ? null : ts.tableHandle); Utilities.FILE_OP_LOGGER.debug("Exporting table {}: MM context {}", - ts == null ? null : ts.tableName, mmCtx); - // Configure export work + ts == null ? null : ts.getTableName(), mmCtx); + // Configure export work ExportWork exportWork = new ExportWork(exportRootDirName, ts, replicationSpec, ErrorMsg.INVALID_PATH.getMsg(ast), acidTableName, mmCtx); // Create an export task and add it as a root task diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java new file mode 100644 index 0000000000..b473366f90 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.parse; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * A utility class for {@link TableName}. + */ +public final class HiveTableName extends TableName { + + public HiveTableName(String catName, String dbName, String tableName) { + super(catName, dbName, tableName); + } + + /** + * Get a @{@link TableName} object based on a @{@link Table}. This is basically a wrapper of + * @{@link TableName#fromString(String, String, String)} to throw a {@link SemanticException} in case of errors. + * @param table the table + * @return a {@link TableName} + * @throws SemanticException + */ + public static TableName of(Table table) throws SemanticException { + return ofNullable(table.getTableName(), table.getDbName()); + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. All parts can be null. + * + * @param dbTableName + * @return a {@link TableName} + * @throws SemanticException + * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} + */ + @Deprecated + public static TableName ofNullable(String dbTableName) throws SemanticException { + return ofNullable(dbTableName, SessionState.get().getCurrentDatabase()); + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. All parts can be null. This method won't try to find the default db based on the session state. + * + * @param dbTableName + * @return a {@link TableName} + * @throws SemanticException + * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} + */ + @Deprecated + public static TableName ofNullableWithNoDefault(String dbTableName) throws SemanticException { + return ofNullable(dbTableName, null); + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. All parts can be null. + * + * @param dbTableName + * @param defaultDb + * @return a {@link TableName} + * @throws SemanticException + * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} + */ + @Deprecated + public static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { + if (dbTableName == null) { + return new TableName(null, null, null); + } else { + try { + return fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); + } + } + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. 
This method won't try to find the default db/catalog based on the session state. + * + * @param dbTableName not null + * @return a {@link TableName} + * @throws SemanticException if dbTableName is null + * @deprecated use {@link #of(String)} instead and use the default db/catalog. + */ + @Deprecated + public static TableName withNoDefault(String dbTableName) throws SemanticException { + try { + return fromString(dbTableName, null, null); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); + } + } + + /** + * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a + * {@link TableName}. + * + * @param dbTableName not null + * @return a {@link TableName} + * @throws SemanticException if dbTableName is null + */ + public static TableName of(String dbTableName) throws SemanticException { + try { + return fromString(dbTableName, SessionState.get().getCurrentCatalog(), SessionState.get().getCurrentDatabase()); + } catch (IllegalArgumentException e) { + throw new SemanticException(e.getCause()); + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java index 7a3c16390c..9ac11fcaf0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MaterializedViewRebuildSemanticAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.LockState; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -54,8 +55,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { return; } - String[] qualifiedTableName = getQualifiedTableName((ASTNode) ast.getChild(0)); - String dbDotTable = getDotName(qualifiedTableName); + TableName tableName = getQualifiedTableName((ASTNode) ast.getChild(0)); + final String dbDotTable = tableName.getNotEmptyDbTable(); ASTNode rewrittenAST; // We need to go lookup the table and get the select statement and then parse it. 
try { @@ -72,7 +73,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { } Context ctx = new Context(queryState.getConf()); rewrittenAST = ParseUtils.parse("insert overwrite table " + - "`" + qualifiedTableName[0] + "`.`" + qualifiedTableName[1] + "` " + + "`" + dbDotTable + "` " + viewText, ctx); this.ctx.addRewrittenStatementContext(ctx); @@ -84,7 +85,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { LockState state; try { state = txnManager.acquireMaterializationRebuildLock( - qualifiedTableName[0], qualifiedTableName[1], txnManager.getCurrentTxnId()).getState(); + tableName.getDb(), tableName.getTable(), txnManager.getCurrentTxnId()).getState(); } catch (LockException e) { throw new SemanticException("Exception acquiring lock for rebuilding the materialized view", e); } @@ -96,8 +97,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { throw new SemanticException(e); } mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD; - mvRebuildDbName = qualifiedTableName[0]; - mvRebuildName = qualifiedTableName[1]; + mvRebuildDbName = tableName.getDb(); + mvRebuildName = tableName.getTable(); LOG.debug("Rebuilding materialized view " + dbDotTable); super.analyzeInternal(rewrittenAST); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java index 33247f0745..31068cb8c3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java @@ -24,6 +24,7 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; @@ -177,7 +178,7 @@ protected Table getTargetTable(ASTNode tabRef) throws SemanticException { * @param throwException if false, return null if table doesn't exist, else throw */ protected static Table getTable(ASTNode tabRef, Hive db, boolean throwException) throws SemanticException { - String[] tableName; + TableName tableName; switch (tabRef.getType()) { case HiveParser.TOK_TABREF: tableName = getQualifiedTableName((ASTNode) tabRef.getChild(0)); @@ -191,12 +192,12 @@ protected static Table getTable(ASTNode tabRef, Hive db, boolean throwException) Table mTable; try { - mTable = db.getTable(tableName[0], tableName[1], throwException); + mTable = db.getTable(tableName.getDb(), tableName.getTable(), throwException); } catch (InvalidTableException e) { - LOG.error("Failed to find table " + getDotName(tableName) + " got exception " + e.getMessage()); - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(getDotName(tableName)), e); + LOG.error("Failed to find table " + tableName.getNotEmptyDbTable() + " got exception " + e.getMessage()); + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName.getNotEmptyDbTable()), e); } catch (HiveException e) { - LOG.error("Failed to find table " + getDotName(tableName) + " got exception " + e.getMessage()); + LOG.error("Failed to find table " + tableName.getNotEmptyDbTable() + " got exception " + e.getMessage()); throw new SemanticException(e.getMessage(), e); } return mTable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 0e58fe20b4..5696e7c006 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -71,6 +71,7 @@ import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.common.StringInternUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; @@ -2261,9 +2262,9 @@ private void getMetaData(QB qb, ReadEntity parentInput) // Whether we are using an acid compliant transaction manager has already been caught in // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid // here, it means the table itself doesn't support it. - throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.tableName); + throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().getTable()); } else { - throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.tableName); + throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().getTable()); } } // TableSpec ts is got from the query (user specified), @@ -2282,7 +2283,7 @@ private void getMetaData(QB qb, ReadEntity parentInput) } if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { // Add the table spec for the destination table. - qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); + qb.getParseInfo().addTableSpec(ts.getTableName().getTable().toLowerCase(), ts); } break; } @@ -2290,8 +2291,8 @@ private void getMetaData(QB qb, ReadEntity parentInput) case HiveParser.TOK_DIR: { // This is a dfs file String fname = stripQuotes(ast.getChild(0).getText()); - if ((!qb.getParseInfo().getIsSubQ()) - && (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) { + if ((!qb.getParseInfo().getIsSubQ()) && (((ASTNode) ast.getChild(0)).getToken().getType() + == HiveParser.TOK_TMP_FILE)) { if (qb.isCTAS() || qb.isMaterializedView()) { qb.setIsQuery(false); @@ -2316,27 +2317,25 @@ private void getMetaData(QB qb, ReadEntity parentInput) location = wh.getDatabasePath(db.getDatabase(destTableDb)); } catch (MetaException e) { throw new SemanticException(e); - } } - try { - CreateTableDesc tblDesc = qb.getTableDesc(); - if (tblDesc != null - && tblDesc.isTemporary() - && AcidUtils.isInsertOnlyTable(tblDesc.getTblProps(), true)) { - fname = FileUtils.makeQualified(location, conf).toString(); - } else { - fname = ctx.getExtTmpPathRelTo( - FileUtils.makeQualified(location, conf)).toString(); - } - } catch (Exception e) { - throw new SemanticException(generateErrorMessage(ast, - "Error creating temporary folder on: " + location.toString()), e); - } - if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { - TableSpec ts = new TableSpec(db, conf, this.ast); - // Add the table spec for the destination table. 
- qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts); + } + try { + CreateTableDesc tblDesc = qb.getTableDesc(); + if (tblDesc != null && tblDesc.isTemporary() && AcidUtils + .isInsertOnlyTable(tblDesc.getTblProps(), true)) { + fname = FileUtils.makeQualified(location, conf).toString(); + } else { + fname = ctx.getExtTmpPathRelTo(FileUtils.makeQualified(location, conf)).toString(); } + } catch (Exception e) { + throw new SemanticException( + generateErrorMessage(ast, "Error creating temporary folder on: " + location.toString()), e); + } + if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { + TableSpec ts = new TableSpec(db, conf, this.ast); + // Add the table spec for the destination table. + qb.getParseInfo().addTableSpec(ts.getTableName().getTable().toLowerCase(), ts); + } } else { // This is the only place where isQuery is set to true; it defaults to false. qb.setIsQuery(true); @@ -6945,8 +6944,7 @@ private void genPartnCols(String dest, Operator input, QB qb, @SuppressWarnings("unchecked") private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException { - String qTableName = DDLSemanticAnalyzer.getDotName(new String[] { dbName, - tableName }); + String qTableName = HiveTableName.ofNullable(tableName, dbName).getNotEmptyDbTable(); AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, null, false); HashMap mapProp = new HashMap<>(); mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); @@ -13246,8 +13244,8 @@ boolean hasConstraints(final List partCols, final List cols = new ArrayList(); @@ -13489,7 +13487,8 @@ ASTNode analyzeCreateTable( } tblProps = validateAndAddDefaultProperties( tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); + addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, + TableType.MANAGED_TABLE, isTemporary, tblProps); CreateTableDesc crtTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, @@ -13511,11 +13510,12 @@ ASTNode analyzeCreateTable( case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { throw new SemanticException( - qualifiedTabName[1] + " cannot be declared transactional because it's an external table"); + qualifiedTabName.getTable() + " cannot be declared transactional because it's an external table"); } tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, false, tblProps); + addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, + TableType.MANAGED_TABLE, false, tblProps); CreateTableDesc crtTranTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, @@ -13536,7 +13536,8 @@ ASTNode analyzeCreateTable( case CTLT: // create table like tblProps = validateAndAddDefaultProperties( tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); + addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, + TableType.MANAGED_TABLE, isTemporary, tblProps); if (isTemporary) { Table likeTable = 
getTable(likeTableName, false); @@ -13556,17 +13557,15 @@ ASTNode analyzeCreateTable( if (isTemporary) { if (!ctx.isExplainSkipExecution() && !isMaterialization) { - String dbName = qualifiedTabName[0]; - String tblName = qualifiedTabName[1]; SessionState ss = SessionState.get(); if (ss == null) { throw new SemanticException("No current SessionState, cannot create temporary table " - + dbName + "." + tblName); + + qualifiedTabName.getNotEmptyDbTable()); } Map tables = SessionHiveMetaStoreClient. - getTempTablesForDatabase(dbName, tblName); - if (tables != null && tables.containsKey(tblName)) { - throw new SemanticException("Temporary table " + dbName + "." + tblName + getTempTablesForDatabase(qualifiedTabName.getDb(), qualifiedTabName.getTable()); + if (tables != null && tables.containsKey(qualifiedTabName.getTable())) { + throw new SemanticException("Temporary table " + qualifiedTabName.getNotEmptyDbTable() + " already exists"); } } @@ -13619,8 +13618,9 @@ ASTNode analyzeCreateTable( tblProps = validateAndAddDefaultProperties( tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); - tableDesc = new CreateTableDesc(qualifiedTabName[0], dbDotTab, isExt, isTemporary, cols, + addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, + TableType.MANAGED_TABLE, isTemporary, tblProps); + tableDesc = new CreateTableDesc(qualifiedTabName.getDb(), dbDotTab, isExt, isTemporary, cols, partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), @@ -13655,8 +13655,8 @@ private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, } protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException { - String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); - String dbDotTable = getDotName(qualTabName); + TableName qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); + final String dbDotTable = qualTabName.getNotEmptyDbTable(); List cols = null; boolean ifNotExists = false; boolean rewriteEnabled = true; @@ -13764,7 +13764,8 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); - addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW, false, tblProps); + addDbAndTabToOutputs(new String[] { qualTabName.getDb(), qualTabName.getTable() }, TableType.MATERIALIZED_VIEW, + false, tblProps); queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); } else { createVwDesc = new CreateViewDesc( @@ -13772,7 +13773,8 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createVwDesc))); - addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); + addDbAndTabToOutputs(new String[] { qualTabName.getDb(), qualTabName.getTable() }, TableType.VIRTUAL_VIEW, false, + tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } 
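The temporary-table error messages above switch from hand-built dbName + "." + tblName strings to getNotEmptyDbTable(). A rough sketch of why (assumed standalone, not analyzer code; the table name is invented): the deprecated getDbTable() concatenates blindly, so an unqualified name renders as "null.table" in a message, while getNotEmptyDbTable() simply drops the missing database part.

import org.apache.hadoop.hive.common.TableName;

public class DbTableSketch {
  public static void main(String[] args) {
    // No catalog/database defaults supplied, so only the table part is set.
    TableName unqualified = TableName.fromString("tmp_orders", null, null);

    System.out.println(unqualified.getDbTable());          // "null.tmp_orders" (deprecated form)
    System.out.println(unqualified.getNotEmptyDbTable());  // "tmp_orders"
  }
}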
qb.setViewDesc(createVwDesc); @@ -15281,15 +15283,14 @@ public boolean isValidQueryCaching() { */ protected String getFullTableNameForSQL(ASTNode n) throws SemanticException { switch (n.getType()) { - case HiveParser.TOK_TABNAME: - String[] tableName = getQualifiedTableName(n); - return getDotName(new String[] { - HiveUtils.unparseIdentifier(tableName[0], this.conf), - HiveUtils.unparseIdentifier(tableName[1], this.conf) }); - case HiveParser.TOK_TABREF: - return getFullTableNameForSQL((ASTNode) n.getChild(0)); - default: - throw raiseWrongType("TOK_TABNAME", n); + case HiveParser.TOK_TABNAME: + TableName tableName = getQualifiedTableName(n); + return HiveTableName.ofNullable(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), + HiveUtils.unparseIdentifier(tableName.getDb(), this.conf)).getNotEmptyDbTable(); + case HiveParser.TOK_TABREF: + return getFullTableNameForSQL((ASTNode) n.getChild(0)); + default: + throw raiseWrongType("TOK_TABNAME", n); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 61b60680dc..d923a2cfd4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -251,8 +251,7 @@ protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticExcept ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { isTable = true; - String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); - object = BaseSemanticAnalyzer.getDotName(qualified); + object = BaseSemanticAnalyzer.getQualifiedTableName(gchild).getNotEmptyDbTable(); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java index c2e26f0710..ad459963b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java @@ -152,7 +152,7 @@ private void writeData(PartitionIterable partitions) throws SemanticException { if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for partitionTable :" - + tableSpec.tableName); + + tableSpec.getTableName().getTable()); } new PartitionExport(paths, partitions, distCpDoAsUser, conf, mmCtx).write(replicationSpec); } else { @@ -315,7 +315,7 @@ public AuthEntities getAuthEntities() throws SemanticException { if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for partitionTable :" - + tableSpec.tableName); + + tableSpec.getTableName().getTable()); } for (Partition partition : partitions) { authEntities.inputs.add(new ReadEntity(partition)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java index 5e88b6ebae..26f204e013 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DropPartitionDesc; @@ -43,7 +44,8 @@ Map> partSpecs = ReplUtils.genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions()); if (partSpecs.size() > 0) { - DropPartitionDesc dropPtnDesc = new DropPartitionDesc(actualDbName + "." + actualTblName, partSpecs, true, + DropPartitionDesc dropPtnDesc = + new DropPartitionDesc(HiveTableName.ofNullable(actualTblName, actualDbName), partSpecs, true, context.eventOnlyReplicationSpec()); Task dropPtnTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java index 8603521041..9cd9f53f03 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java @@ -39,7 +39,6 @@ import com.google.common.collect.ImmutableList; import java.io.Serializable; import java.util.ArrayList; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -120,7 +119,7 @@ boolean first; String afterCol; boolean expectView; - HashMap partSpec; + Map partSpec; private String newLocation; boolean protectModeEnable; ProtectModeType protectModeType; @@ -160,7 +159,7 @@ public AlterTableDesc() { * @param newType * @throws SemanticException */ - public AlterTableDesc(String tblName, HashMap partSpec, + public AlterTableDesc(String tblName, Map partSpec, String oldColName, String newColName, String newType, String newComment, boolean first, String afterCol, boolean isCascade) throws SemanticException { super(); @@ -176,7 +175,7 @@ public AlterTableDesc(String tblName, HashMap partSpec, this.isCascade = isCascade; } - public AlterTableDesc(String tblName, HashMap partSpec, + public AlterTableDesc(String tblName, Map partSpec, String oldColName, String newColName, String newType, String newComment, boolean first, String afterCol, boolean isCascade, List primaryKeyCols, List foreignKeyCols, List uniqueConstraintCols, @@ -227,7 +226,7 @@ public AlterTableDesc(String oldName, String newName, boolean expectView, Replic * new columns to be added * @throws SemanticException */ - public AlterTableDesc(String name, HashMap partSpec, List newCols, + public AlterTableDesc(String name, Map partSpec, List newCols, AlterTableTypes alterType, boolean isCascade) throws SemanticException { op = alterType; setOldName(name); @@ -263,7 +262,7 @@ public AlterTableDesc(AlterTableTypes alterType) { * @param partSpec * Partition specifier with map of key and values. 
*/ - public AlterTableDesc(AlterTableTypes alterType, HashMap partSpec, boolean expectView) { + public AlterTableDesc(AlterTableTypes alterType, Map partSpec, boolean expectView) { op = alterType; this.partSpec = partSpec; this.expectView = expectView; @@ -281,7 +280,7 @@ public AlterTableDesc(AlterTableTypes alterType, HashMap partSpe * @throws SemanticException */ public AlterTableDesc(String name, String inputFormat, String outputFormat, - String serdeName, String storageHandler, HashMap partSpec) throws SemanticException { + String serdeName, String storageHandler, Map partSpec) throws SemanticException { super(); op = AlterTableTypes.ADDFILEFORMAT; setOldName(name); @@ -293,7 +292,7 @@ public AlterTableDesc(String name, String inputFormat, String outputFormat, } public AlterTableDesc(String tableName, int numBuckets, - List bucketCols, List sortCols, HashMap partSpec) throws SemanticException { + List bucketCols, List sortCols, Map partSpec) throws SemanticException { setOldName(tableName); op = AlterTableTypes.ADDCLUSTERSORTCOLUMN; numberBuckets = numBuckets; @@ -302,7 +301,7 @@ public AlterTableDesc(String tableName, int numBuckets, this.partSpec = partSpec; } - public AlterTableDesc(String tableName, boolean sortingOff, HashMap partSpec) throws SemanticException { + public AlterTableDesc(String tableName, boolean sortingOff, Map partSpec) throws SemanticException { setOldName(tableName); op = AlterTableTypes.ADDCLUSTERSORTCOLUMN; isTurnOffSorting = sortingOff; @@ -310,7 +309,7 @@ public AlterTableDesc(String tableName, boolean sortingOff, HashMap partSpec) throws SemanticException { + Map partSpec) throws SemanticException { op = AlterTableTypes.ALTERLOCATION; setOldName(tableName); this.newLocation = newLocation; @@ -318,7 +317,7 @@ public AlterTableDesc(String tableName, String newLocation, } public AlterTableDesc(String tableName, Map, String> locations, - HashMap partSpec) throws SemanticException { + Map partSpec) throws SemanticException { op = AlterTableTypes.ALTERSKEWEDLOCATION; setOldName(tableName); this.skewedLocations = locations; @@ -334,7 +333,7 @@ public AlterTableDesc(String tableName, boolean turnOffSkewed, this.skewedColValues = new ArrayList>(skewedColValues); } - public AlterTableDesc(String tableName, HashMap partSpec, int numBuckets) throws SemanticException { + public AlterTableDesc(String tableName, Map partSpec, int numBuckets) throws SemanticException { op = AlterTableTypes.ALTERBUCKETNUM; setOldName(tableName); this.partSpec = partSpec; @@ -770,14 +769,14 @@ public void setExpectView(boolean expectView) { /** * @return part specification */ - public HashMap getPartSpec() { + public Map getPartSpec() { return partSpec; } /** * @param partSpec */ - public void setPartSpec(HashMap partSpec) { + public void setPartSpec(Map partSpec) { this.partSpec = partSpec; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java index 40def601e6..af5d8e69b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java @@ -195,7 +195,7 @@ public String getTableName() { if (work.getLoadTableDesc() != null) { return work.getLoadTableDesc().getTable().getTableName(); } else if (work.getTableSpecs() != null) { - return work.getTableSpecs().tableName; + return work.getTableSpecs().getTableName().getTable(); } else if (getLoadFileDesc().getCtasCreateTableDesc() != null) { return 
getLoadFileDesc().getCtasCreateTableDesc().getTableName(); } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropPartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropPartitionDesc.java index 81fcc4689d..ee2fbafa4b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropPartitionDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropPartitionDesc.java @@ -23,6 +23,7 @@ import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -57,12 +58,12 @@ public int getPrefixLength() { } } - private final String tableName; + private final TableName tableName; private final ArrayList partSpecs; private final boolean ifPurge; private final ReplicationSpec replicationSpec; - public DropPartitionDesc(String tableName, Map> partSpecs, boolean ifPurge, + public DropPartitionDesc(TableName tableName, Map> partSpecs, boolean ifPurge, ReplicationSpec replicationSpec) { this.tableName = tableName; this.partSpecs = new ArrayList(partSpecs.size()); @@ -78,7 +79,7 @@ public DropPartitionDesc(String tableName, Map getPartSpecs() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExportWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExportWork.java index ffb81b54b9..caa22a03b4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExportWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExportWork.java @@ -107,7 +107,7 @@ public MmContext getMmContext() { */ public void acidPostProcess(Hive db) throws HiveException { if (acidFqTableName != null) { - LOG.info("Swapping export of " + tableSpec.tableName + " to " + acidFqTableName + + LOG.info("Swapping export of " + tableSpec.getTableName().getTable() + " to " + acidFqTableName + " using partSpec=" + tableSpec.partSpec); tableSpec = new TableSpec(db, acidFqTableName, tableSpec.partSpec, true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index dd3af1b35c..8e077bc7ee 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -24,6 +24,7 @@ import java.util.Map; import com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -38,7 +39,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -92,9 +93,9 @@ public ImportTableDesc(String dbName, Table table) throws Exception { table.getTTable().getWriteId()); this.createTblDesc.setStoredAsSubDirectories(table.getSd().isStoredAsSubDirectories()); break; - case VIEW: - String[] qualViewName = { dbName, table.getTableName() }; - String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName); + case VIEW: + TableName qualViewName = HiveTableName.ofNullable(table.getTableName(), dbName); + final String dbDotView = qualViewName.getNotEmptyDbTable(); if (table.isMaterializedView()) { this.createViewDesc = new 
CreateViewDesc(dbDotView, table.getAllCols(), @@ -199,14 +200,13 @@ public String getLocation() { public void setTableName(String tableName) throws SemanticException { switch (getDescType()) { - case TABLE: - createTblDesc.setTableName(tableName); - break; - case VIEW: - String[] qualViewName = { dbName, tableName }; - String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName); - createViewDesc.setViewName(dbDotView); - break; + case TABLE: + createTblDesc.setTableName(tableName); + break; + case VIEW: + TableName qualViewName = HiveTableName.ofNullable(tableName, dbName); + createViewDesc.setViewName(qualViewName.getNotEmptyDbTable()); + break; } } diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index f5cb192561..e3fa060402 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -17,34 +17,45 @@ */ package org.apache.hadoop.hive.common; +import java.io.Serializable; +import java.util.Objects; + /** * A container for a fully qualified table name, i.e. catalogname.databasename.tablename. Also * includes utilities for string parsing. */ -public class TableName { +public class TableName implements Serializable { + + private static final long serialVersionUID = 1L; + + /** Exception message thrown. */ + private static final String ILL_ARG_EXCEPTION_MSG = + "Table name must be either , . " + "or .."; + + /** Names of the related DB objects. */ private final String cat; private final String db; private final String table; /** * - * @param cat catalog name. Cannot be null. If you do not know it you can get it from + * @param catName catalog name. Cannot be null. If you do not know it you can get it from * SessionState.getCurrentCatalog() if you want to use the catalog from the current * session, or from MetaStoreUtils.getDefaultCatalog() if you do not have a session * or want to use the default catalog for the Hive instance. - * @param db database name. Cannot be null. If you do not now it you can get it from + * @param dbName database name. Cannot be null. If you do not now it you can get it from * SessionState.getCurrentDatabase() or use Warehouse.DEFAULT_DATABASE_NAME. - * @param table table name, cannot be null + * @param tableName table name, cannot be null */ - public TableName(String cat, String db, String table) { - this.cat = cat; - this.db = db; - this.table = table; + public TableName(final String catName, final String dbName, final String tableName) { + this.cat = catName; + this.db = dbName; + this.table = tableName; } /** * Build a TableName from a string of the form [[catalog.]database.]table. - * @param name name in string form + * @param name name in string form, not null * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not * know it you can get it from SessionState.getCurrentCatalog() if you * want to use the catalog from the current session, or from @@ -54,17 +65,21 @@ public TableName(String cat, String db, String table) { * not now it you can get it from SessionState.getCurrentDatabase() or * use Warehouse.DEFAULT_DATABASE_NAME. 
* @return TableName + * @throws IllegalArgumentException if a non-null name is given */ - public static TableName fromString(String name, String defaultCatalog, String defaultDatabase) { + public static TableName fromString(final String name, final String defaultCatalog, final String defaultDatabase) + throws IllegalArgumentException { + if (name == null) { + throw new IllegalArgumentException(String.join("", "Table value was null. ", ILL_ARG_EXCEPTION_MSG)); + } if (name.contains(DatabaseName.CAT_DB_TABLE_SEPARATOR)) { - String names[] = name.split("\\."); + String[] names = name.split("\\."); if (names.length == 2) { return new TableName(defaultCatalog, names[0], names[1]); } else if (names.length == 3) { return new TableName(names[0], names[1], names[2]); } else { - throw new RuntimeException("Table name must be either , . " + - "or .."); + throw new IllegalArgumentException(ILL_ARG_EXCEPTION_MSG); } } else { @@ -86,10 +101,19 @@ public String getTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. + * Fair warning, that if the db is null, this will return null.tableName + * @deprecated use {@link #getNotEmptyDbTable()} instead. */ + @Deprecated public String getDbTable() { return db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } + /** + * Get the name in db.table format, if db is not empty, otherwise pass only the table name. + */ + public String getNotEmptyDbTable() { + return db == null || db.isEmpty() ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } /** @@ -97,25 +121,26 @@ public String getDbTable() { */ public static String getDbTable(String dbName, String tableName) { return dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; - } public static String getQualified(String catName, String dbName, String tableName) { return catName + DatabaseName.CAT_DB_TABLE_SEPARATOR + dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } - @Override - public int hashCode() { - return (cat.hashCode() * 31 + db.hashCode()) * 31 + table.hashCode(); + @Override public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TableName tableName = (TableName) o; + return Objects.equals(cat, tableName.cat) && Objects.equals(db, tableName.db) && Objects + .equals(table, tableName.table); } - @Override - public boolean equals(Object obj) { - if (obj != null && obj instanceof TableName) { - TableName that = (TableName)obj; - return table.equals(that.table) && db.equals(that.db) && cat.equals(that.cat); - } - return false; + @Override public int hashCode() { + return Objects.hash(cat, db, table); } @Override diff --git a/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java b/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java index 0a8cb2a82e..f19c7358c9 100644 --- a/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java +++ b/storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java @@ -22,7 +22,7 @@ public class TestTableName { @Test - public void fullname() { + public void fullName() { TableName name = new TableName("cat", "db", "t"); Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); @@ -47,5 +47,24 @@ public void fromString() { Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); Assert.assertEquals("tab", name.getTable()); + + try { + TableName.fromString(null, null, null); + Assert.fail("Name can't be null"); + } catch 
(IllegalArgumentException e) { + Assert.assertTrue(true); + } + } + + @Test + public void testNotEmptyDbTable() { + TableName name = new TableName("cat", "db", "t"); + Assert.assertEquals("db.t", name.getNotEmptyDbTable()); + + name = new TableName("cat", null, "t"); + Assert.assertEquals("t", name.getNotEmptyDbTable()); + + name = new TableName("cat", "", "t"); + Assert.assertEquals("t", name.getNotEmptyDbTable()); } }
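Putting the new pieces together, a minimal usage sketch (illustrative only; the literal names and the defaultCat/defaultDb values are invented) of the parsing contract the tests above exercise, and of how the ql-side HiveTableName wrapper is meant to be called so that the unchecked IllegalArgumentException surfaces as a SemanticException:

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class TableNameUsageSketch {
  public static void main(String[] args) throws SemanticException {
    // [[catalog.]database.]table parsing, falling back to the supplied defaults.
    TableName full = TableName.fromString("cat.db.tab", "defaultCat", "defaultDb");
    // full.getCat() -> "cat", full.getDb() -> "db", full.getTable() -> "tab"

    TableName dbOnly = TableName.fromString("db.tab", "defaultCat", "defaultDb");
    // dbOnly.getCat() -> "defaultCat", dbOnly.getNotEmptyDbTable() -> "db.tab"

    // A null name is now rejected with IllegalArgumentException (see the test above);
    // the analyzer-side wrapper rethrows parse failures as SemanticException instead.
    TableName fromAnalyzer = HiveTableName.withNoDefault("db.tab");
    System.out.println(fromAnalyzer.getNotEmptyDbTable());  // -> "db.tab"
  }
}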