diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java index 333db4db66..f14e6285cb 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java @@ -154,6 +154,7 @@ static public Deserializer getDeserializer(Configuration conf, ObjectInspector oi = deserializer.getObjectInspector(); String[] names = tableName.split("\\."); String last_name = names[names.length - 1]; + // 0 = db, 1 = table for (int i = 2; i < names.length; i++) { if (oi instanceof StructObjectInspector) { diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 48ebc4f870..e426e4af0c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -339,7 +339,7 @@ private void acquireLocks() throws CommandProcessorException { fsd1.getDirName().compareTo(fsd2.getDirName())); for (FileSinkDesc desc : acidSinks) { TableDesc tableInfo = desc.getTableInfo(); - final TableName tn = HiveTableName.ofNullable(tableInfo.getTableName()); + final TableName tn = HiveTableName.of(tableInfo.getTableName()); long writeId = driverContext.getTxnManager().getTableWriteId(tn.getDb(), tn.getTable()); desc.setTableWriteId(writeId); diff --git ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java index 4b833b730c..b510f265c5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java +++ ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java @@ -52,6 +52,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -631,13 +632,13 @@ public long getSize() { } } - public void notifyTableChanged(String dbName, String tableName, long updateTime) { - LOG.debug("Table changed: {}.{}, at {}", dbName, tableName, updateTime); + public void notifyTableChanged(TableName tableName, long updateTime) { + LOG.debug("Table changed: {}, at {}", tableName, updateTime); // Invalidate all cache entries using this table. List entriesToInvalidate = null; rwLock.writeLock().lock(); try { - String key = (dbName.toLowerCase() + "." 
+ tableName.toLowerCase()); + String key = (tableName.toString()); Set entriesForTable = tableToEntryMap.get(key); if (entriesForTable != null) { // Possible concurrent modification issues if we try to remove cache entries while @@ -989,7 +990,7 @@ public void accept(NotificationEvent event) { QueryResultsCache cache = QueryResultsCache.getInstance(); if (cache != null) { long eventTime = event.getEventTime() * 1000L; - cache.notifyTableChanged(dbName, tableName, eventTime); + cache.notifyTableChanged(TableName.fromString(tableName, null, dbName), eventTime); } else { LOG.debug("Cache not instantiated, skipping event on {}.{}", dbName, tableName); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java index 9e9d30f246..5fd8f0a095 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java @@ -63,7 +63,7 @@ public AlterTableType getType() { @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDbTableName() { - return tableName.getNotEmptyDbTable(); + return tableName.toString(); } @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -96,7 +96,7 @@ public EnvironmentContext getEnvironmentContext() { @Override public String getFullTableName() { - return tableName.getNotEmptyDbTable(); + return tableName.toString(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java index 87c65de370..368b4ad97d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java @@ -51,7 +51,7 @@ public TableName getTableName() { @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDbTableName() { - return tableName.getNotEmptyDbTable(); + return tableName.toString(); } public ReplicationSpec getReplicationSpec() { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java index 091c146940..e73aea6048 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java @@ -33,17 +33,17 @@ public class AlterTableRenameDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - private final String newName; + private final TableName newTableName; - public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView, String newName) + public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView, TableName newTableName) throws SemanticException { super(AlterTableType.RENAME, tableName, null, replicationSpec, false, expectView, null); - this.newName = newName; + this.newTableName = newTableName; } @Explain(displayName = "new table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getNewName() { - return newName; + public TableName getNewTableName() { + return newTableName; } 
@Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java index 73ea400dcc..0c63c98ea8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java @@ -48,6 +48,6 @@ public int execute() throws HiveException { @Override protected void doAlteration(Table table, Partition partition) throws HiveException { - HiveTableName.setFrom(desc.getNewName(), table); + HiveTableName.setFrom(desc.getNewTableName(), table); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index dc6d31a9cb..9c5f3fa460 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -84,8 +84,7 @@ private ColumnStatistics constructColumnStatsFromInput() colStats.getStatsDesc().getTableName()); return colStats; } - String dbName = work.dbName(); - String tableName = work.getTableName(); + TableName tableName = work.getTableName(); String partName = work.getPartName(); String colName = work.getColName(); String columnType = work.getColType(); @@ -291,7 +290,7 @@ private ColumnStatistics constructColumnStatsFromInput() } else { throw new SemanticException("Unsupported type"); } - ColumnStatisticsDesc statsDesc = getColumnStatsDesc(dbName, tableName, + ColumnStatisticsDesc statsDesc = getColumnStatsDesc(tableName, partName, partName == null); ColumnStatistics colStat = new ColumnStatistics(); colStat.setStatsDesc(statsDesc); @@ -300,11 +299,10 @@ private ColumnStatistics constructColumnStatsFromInput() return colStat; } - private ColumnStatisticsDesc getColumnStatsDesc(String dbName, - String tableName, String partName, boolean isTblLevel) { + private ColumnStatisticsDesc getColumnStatsDesc(TableName tableName, String partName, boolean isTblLevel) { ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(); - statsDesc.setDbName(dbName); - statsDesc.setTableName(tableName); + statsDesc.setDbName(tableName.getDb()); + statsDesc.setTableName(tableName.getTable()); statsDesc.setIsTblLevel(isTblLevel); if (!isTblLevel) { statsDesc.setPartName(partName); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index c1f94d165b..faad908b33 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -42,6 +42,7 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory; import org.apache.hadoop.hive.conf.HiveConf; @@ -744,7 +745,7 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader, private boolean isPrintable(Object val) { if (val instanceof Boolean || val instanceof String || val instanceof Integer || val instanceof Long || val instanceof Byte - || val instanceof Float || val instanceof Double || val instanceof Path) { + || val instanceof Float || val instanceof Double || val instanceof Path || val instanceof TableName) { return true; } diff --git 
ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 46bb37a0c2..e6dee0e73d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -435,14 +435,14 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalo throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName, dbName); + return TableName.fromString(tableName, null, dbName); } final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); if (tableName.contains(".")) { throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName); + return HiveTableName.of(tableName); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index ba019c7553..53d5a53560 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -253,7 +253,7 @@ private void analyzeAlterTableUpdateStats(ASTNode ast, TableName tblName, Map par else if (entry.getKey().equals("external") && entry.getValue().equals("true")) { if (hasConstraintsEnabled(tableName.getTable())) { throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName.getDbTable() + " has constraints enabled." + ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName.getNotEmptyDbTable() + " has constraints enabled." + "Please remove those constraints to change this property.")); } } @@ -775,7 +781,7 @@ private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expe throws SemanticException { final TableName target = getQualifiedTableName((ASTNode) ast.getChild(0)); - AlterTableRenameDesc alterTblDesc = new AlterTableRenameDesc(source, null, expectView, target.getDbTable()); + AlterTableRenameDesc alterTblDesc = new AlterTableRenameDesc(source, null, expectView, target); Table table = getTable(source.getDbTable(), true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java index cd9f88c53b..bca520f16e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java @@ -38,37 +38,22 @@ public HiveTableName(String catName, String dbName, String tableName) { * @throws SemanticException */ public static TableName of(Table table) throws SemanticException { - return ofNullable(table.getTableName(), table.getDbName()); + return ofNullable(table.getTableName(), table.getDbName()); // todo: this shouldn't call nullable } /** - * Set a @{@link Table} object's table and db names based on the provided string. - * @param dbTable the dbtable string + * Set a @{@link Table} object's table and db names based on the provided tableName object. 
+ * @param tableName the tableName object * @param table the table to update * @return the table * @throws SemanticException */ - public static Table setFrom(String dbTable, Table table) throws SemanticException{ - TableName name = ofNullable(dbTable); - table.setTableName(name.getTable()); - table.setDbName(name.getDb()); + public static Table setFrom(TableName tableName, Table table) throws SemanticException{ + table.setTableName(tableName.getTable()); + table.setDbName(tableName.getDb()); return table; } - /** - * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a - * {@link TableName}. All parts can be null. - * - * @param dbTableName - * @return a {@link TableName} - * @throws SemanticException - * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} - */ - // to be @Deprecated - public static TableName ofNullable(String dbTableName) throws SemanticException { - return ofNullable(dbTableName, SessionState.get().getCurrentDatabase()); - } - /** * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a * {@link TableName}. All parts can be null. This method won't try to find the default db based on the session state. @@ -94,12 +79,13 @@ public static TableName ofNullableWithNoDefault(String dbTableName) throws Seman * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)} */ // to be @Deprecated - public static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { + private static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { // todo: decommission if (dbTableName == null) { return new TableName(null, null, null); } else { try { - return fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb); + final String cat = defaultDb == null || defaultDb.trim().isEmpty() ? null : SessionState.get().getCurrentCatalog(); // if a db is null, so should the catalog be + return fromString(dbTableName, cat, defaultDb); } catch (IllegalArgumentException e) { throw new SemanticException(e); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index f46739eb67..d617953ce0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2307,9 +2307,9 @@ private void getMetaData(QB qb, ReadEntity parentInput) // Whether we are using an acid compliant transaction manager has already been caught in // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid // here, it means the table itself doesn't support it. 
- throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().getTable()); + throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().getNotEmptyDbTable()); } else { - throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().getTable()); + throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().getNotEmptyDbTable()); } } // TableSpec ts is got from the query (user specified), @@ -7076,7 +7076,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L } private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException { - TableName qTableName = HiveTableName.ofNullable(tableName, dbName); + TableName qTableName = TableName.fromString(tableName, null, dbName); Map mapProp = new HashMap<>(); mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); AlterTableUnsetPropertiesDesc alterTblDesc = new AlterTableUnsetPropertiesDesc(qTableName, null, null, false, @@ -7631,7 +7631,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) fileSinkColInfos = new ArrayList<>(); destTableIsTemporary = tblDesc.isTemporary(); destTableIsMaterialization = tblDesc.isMaterialization(); - tableName = TableName.fromString(tblDesc.getDbTableName(), null, tblDesc.getDatabaseName()); + tableName = tblDesc.getTableName(); tblProps = tblDesc.getTblProps(); } else if (viewDesc != null) { fieldSchemas = new ArrayList<>(); @@ -13663,7 +13663,7 @@ ASTNode analyzeCreateTable( case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { throw new SemanticException( - qualifiedTabName.getTable() + " cannot be declared transactional because it's an external table"); + qualifiedTabName.getNotEmptyDbTable() + " cannot be declared transactional because it's an external table"); } tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); @@ -15392,7 +15392,7 @@ protected String getFullTableNameForSQL(ASTNode n) throws SemanticException { switch (n.getType()) { case HiveParser.TOK_TABNAME: TableName tableName = getQualifiedTableName(n); - return HiveTableName.ofNullable(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), + return TableName.fromString(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), null, HiveUtils.unparseIdentifier(tableName.getDb(), this.conf)).getNotEmptyDbTable(); case HiveParser.TOK_TABREF: return getFullTableNameForSQL((ASTNode) n.getChild(0)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java index 97a1dd31a7..6d9dac8ea1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java @@ -154,7 +154,7 @@ private void writeData(PartitionIterable partitions) throws SemanticException { if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for partitionTable :" - + tableSpec.getTableName().getTable()); + + tableSpec.getTableName().getNotEmptyDbTable()); } new PartitionExport(paths, partitions, distCpDoAsUser, conf, mmCtx).write(replicationSpec); } else { @@ -316,7 +316,7 @@ public AuthEntities getAuthEntities() throws SemanticException { if (tableSpec.tableHandle.isPartitioned()) { if (partitions == null) { throw new IllegalStateException("partitions cannot be null for 
partitionTable :" - + tableSpec.getTableName().getTable()); + + tableSpec.getTableName().getNotEmptyDbTable()); } for (Partition partition : partitions) { authEntities.inputs.add(new ReadEntity(partition)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java index 34d3b00500..f7e9366311 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java @@ -37,7 +37,7 @@ DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload()); final String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName; final String actualTblName = msg.getTable(); - final TableName tName = HiveTableName.ofNullable(actualTblName, actualDbName); + final TableName tName = TableName.fromString(actualTblName, null, actualDbName); String constraintName = msg.getConstraint(); AlterTableDropConstraintDesc dropConstraintsDesc = diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java index 066549d9cd..4fa088dfab 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.parse.repl.load.message; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.messaging.DropPartitionMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.partition.drop.AlterTableDropPartitionDesc; @@ -45,7 +46,7 @@ ReplUtils.genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions()); if (partSpecs.size() > 0) { AlterTableDropPartitionDesc dropPtnDesc = - new AlterTableDropPartitionDesc(HiveTableName.ofNullable(actualTblName, actualDbName), partSpecs, true, + new AlterTableDropPartitionDesc(TableName.fromString(actualTblName, null, actualDbName), partSpecs, true, context.eventOnlyReplicationSpec()); Task dropPtnTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java index 82e50ff442..4637be7856 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java @@ -62,7 +62,7 @@ replicationSpec.setMigratingToTxnTable(); } AlterTableRenameDesc renameTableDesc = - new AlterTableRenameDesc(oldName, replicationSpec, false, newName.getNotEmptyDbTable()); + new AlterTableRenameDesc(oldName, replicationSpec, false, newName); renameTableDesc.setWriteId(msg.getWriteId()); Task renameTableTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java index c90ea437f5..c79f4971da 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java @@ -21,6 
+21,7 @@ import java.io.Serializable; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -39,8 +40,7 @@ private static final long serialVersionUID = 1L; private final String partName; private final Map mapProp; - private final String dbName; - private final String tableName; + private final TableName tableName; private final String colName; private final String colType; private final ColumnStatistics colStats; @@ -50,13 +50,11 @@ public ColumnStatsUpdateWork(String partName, Map mapProp, - String dbName, - String tableName, + TableName tableName, String colName, String colType) { this.partName = partName; this.mapProp = mapProp; - this.dbName = dbName; this.tableName = tableName; this.colName = colName; this.colType = colType; @@ -69,8 +67,7 @@ public ColumnStatsUpdateWork(ColumnStatistics colStats, boolean isMigratingToTxn this.isMigratingToTxn = isMigratingToTxn; this.partName = null; this.mapProp = null; - this.dbName = null; - this.tableName = null; + this.tableName = null; // FIXME: This won't do this.colName = null; this.colType = null; } @@ -88,11 +85,7 @@ public String getPartName() { return mapProp; } - public String dbName() { - return dbName; - } - - public String getTableName() { + public TableName getTableName() { return tableName; } @@ -117,7 +110,7 @@ public void setWriteId(long writeId) { @Override public String getFullTableName() { - return dbName + "." + tableName; + return tableName.getNotEmptyDbTable(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index dc7040e388..83d043eeee 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -56,7 +56,7 @@ public ImportTableDesc(String dbName, Table table) throws Exception { this.dbName = dbName; this.table = table; - final TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName); + final TableName tableName = TableName.fromString(table.getTableName(), null, dbName); switch (getDescType()) { case TABLE: diff --git ql/src/test/results/clientnegative/create_external_transactional.q.out ql/src/test/results/clientnegative/create_external_transactional.q.out index 6a0f5c14bf..dcd42096d2 100644 --- ql/src/test/results/clientnegative/create_external_transactional.q.out +++ ql/src/test/results/clientnegative/create_external_transactional.q.out @@ -1 +1 @@ -FAILED: SemanticException transactional_external cannot be declared transactional because it's an external table +FAILED: SemanticException default.transactional_external cannot be declared transactional because it's an external table diff --git ql/src/test/results/clientnegative/delete_non_acid_table.q.out ql/src/test/results/clientnegative/delete_non_acid_table.q.out index dafac6d7df..19fd5fb426 100644 --- ql/src/test/results/clientnegative/delete_non_acid_table.q.out +++ ql/src/test/results/clientnegative/delete_non_acid_table.q.out @@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table2 -1070883071 0ruyd6Y50JpdGRf6HqD -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet -FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table2 that is not transactional +FAILED: SemanticException [Error 10297]: Attempt to do update or delete 
on table default.not_an_acid_table2 that is not transactional diff --git ql/src/test/results/clientnegative/mm_delete.q.out ql/src/test/results/clientnegative/mm_delete.q.out index d0fd905673..ed7bafba98 100644 --- ql/src/test/results/clientnegative/mm_delete.q.out +++ ql/src/test/results/clientnegative/mm_delete.q.out @@ -65,4 +65,4 @@ POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@mm_srcpart@ds=2008-04-08/hr=11 POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).key SCRIPT [] POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).value SCRIPT [] -FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only transactional +FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional diff --git ql/src/test/results/clientnegative/mm_update.q.out ql/src/test/results/clientnegative/mm_update.q.out index 528d16269f..946ffd1598 100644 --- ql/src/test/results/clientnegative/mm_update.q.out +++ ql/src/test/results/clientnegative/mm_update.q.out @@ -55,4 +55,4 @@ POSTHOOK: Input: default@mm_srcpart@ds=2008-04-09/hr=11 2008-04-09 11 43 val_43 2008-04-09 11 413 val_413 2008-04-09 11 413 val_413 -FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only transactional +FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional diff --git ql/src/test/results/clientnegative/update_non_acid_table.q.out ql/src/test/results/clientnegative/update_non_acid_table.q.out index 64164ba4ed..02946fc185 100644 --- ql/src/test/results/clientnegative/update_non_acid_table.q.out +++ ql/src/test/results/clientnegative/update_non_acid_table.q.out @@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table -1070883071 0ruyd6Y50JpdGRf6HqD -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet -FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table that is not transactional +FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table default.not_an_acid_table that is not transactional diff --git ql/src/test/results/clientpositive/input3.q.out ql/src/test/results/clientpositive/input3.q.out index 0365ff25ba..90d7a5f885 100644 --- ql/src/test/results/clientpositive/input3.q.out +++ ql/src/test/results/clientpositive/input3.q.out @@ -93,7 +93,7 @@ STAGE PLANS: Stage: Stage-0 Rename Table table name: default.TEST3b - new table name: default.TEST3c + new table name: hive.default.TEST3c PREHOOK: query: ALTER TABLE TEST3b RENAME TO TEST3c PREHOOK: type: ALTERTABLE_RENAME diff --git ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out index 981fa86d25..7bd9526aa7 100644 --- ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out +++ ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out @@ -174,7 +174,7 @@ POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: newdb@tab_n2 POSTHOOK: Output: newdb@tab_n2 Stage-0 - Rename Table{"table name:":"newDB.tab_n2","new table name:":"newDB.newName"} + Rename Table{"table name:":"newDB.tab_n2","new table name:":"hive.newDB.newName"} PREHOOK: query: drop table tab_n2 PREHOOK: type: DROPTABLE diff --git ql/src/test/results/clientpositive/tez/explainuser_3.q.out ql/src/test/results/clientpositive/tez/explainuser_3.q.out index d4374f02e3..96a2f60181 100644 --- 
ql/src/test/results/clientpositive/tez/explainuser_3.q.out +++ ql/src/test/results/clientpositive/tez/explainuser_3.q.out @@ -178,7 +178,7 @@ POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: newdb@tab_n1 POSTHOOK: Output: newdb@tab_n1 Stage-0 - Rename Table{"table name:":"newDB.tab_n1","new table name:":"newDB.newName"} + Rename Table{"table name:":"newDB.tab_n1","new table name:":"hive.newDB.newName"} PREHOOK: query: explain drop table tab_n1 PREHOOK: type: DROPTABLE diff --git storage-api/src/java/org/apache/hadoop/hive/common/TableName.java storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index a0f47c2ebf..80e5c646e3 100644 --- storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -36,8 +36,11 @@ private final String cat; private final String db; private final String table; + private final boolean catIsBlank; + private final boolean dbIsBlank; /** + * Creates a TableName object, lowercasing all non-blank parts. * * @param catName catalog name. Cannot be null. If you do not know it you can get it from * SessionState.getCurrentCatalog() if you want to use the catalog from the current @@ -48,14 +51,38 @@ * @param tableName table name, cannot be null */ public TableName(final String catName, final String dbName, final String tableName) { - this.cat = catName; - this.db = dbName; - this.table = tableName; + if (tableName == null || tableName.trim().isEmpty()) { + throw new IllegalArgumentException(String.join("", "Table value was blank. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.dbIsBlank = dbName == null || dbName.trim().isEmpty(); + this.catIsBlank = catName == null || catName.trim().isEmpty(); + if (dbIsBlank && !catIsBlank) { + throw new IllegalArgumentException(String.join("", "Invalid values: database was blank, while catalog wasn't. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.cat = this.catIsBlank ? catName : catName.toLowerCase(); + this.db = this.dbIsBlank ? dbName : dbName.toLowerCase(); + this.table = tableName.toLowerCase(); + } + + /** + * Build a TableName from a string of the form [database.]table. + * @param name name in string form, not null, of the form [[catalog.]database.]table. + * @param defaultDatabase default database to use if database is not in the name. If you do + * not know it you can get it from SessionState.getCurrentDatabase() or + * use Warehouse.DEFAULT_DATABASE_NAME. + * @return TableName + * @throws IllegalArgumentException if a null or invalid name is given + */ + public static TableName fromString(final String name, final String defaultDatabase) + throws IllegalArgumentException { + return fromString(name, null, defaultDatabase); } /** * Build a TableName from a string of the form [[catalog.]database.]table. - * @param name name in string form, not null + * @param name name in string form, not null, of the form [[catalog.]database.]table. * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not * know it you can get it from SessionState.getCurrentCatalog() if you * want to use the catalog from the current session, or from @@ -102,9 +129,9 @@ public String getTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. * Fair warning, that if the db is null, this will return null.tableName - * @deprecated use {@link #getNotEmptyDbTable()} instead. + * @deprecated use {@link #toString()} instead. 
*/ - // to be @Deprecated + // todo: remove, refactor public String getDbTable() { return db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } @@ -114,7 +141,7 @@ public String getDbTable() { */ public String getEscapedNotEmptyDbTable() { return - db == null || db.trim().isEmpty() ? + dbIsBlank ? "`" + table + "`" : "`" + db + "`" + DatabaseName.CAT_DB_TABLE_SEPARATOR + "`" + table + "`"; } @@ -128,10 +155,12 @@ public String getNotEmptyDbTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. */ + // todo: this can be quite unsafe public static String getDbTable(String dbName, String tableName) { return dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } + // todo: this can be quite unsafe public static String getQualified(String catName, String dbName, String tableName) { return catName + DatabaseName.CAT_DB_TABLE_SEPARATOR + dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } @@ -154,6 +183,10 @@ public static String getQualified(String catName, String dbName, String tableNam @Override public String toString() { - return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + if (catIsBlank) { + return dbIsBlank ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } else { + return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } } }
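
Reviewer note, not part of the patch: the sketch below illustrates the TableName semantics this diff introduces (constructor-level lowercasing and blank-part validation, the new two-argument fromString overload, and the blank-aware toString), including the shape QueryResultsCache now relies on for its tableToEntryMap key. It is a minimal demo assuming the patched storage-api class is on the classpath; the demo class name and the literal values are hypothetical.

    import org.apache.hadoop.hive.common.TableName;

    // Hypothetical demo class, not part of the patch.
    public class TableNameSemanticsDemo {
      public static void main(String[] args) {
        // All non-blank parts are lowercased by the patched constructor.
        TableName full = new TableName("HIVE", "NewDB", "NewName");
        System.out.println(full);                         // prints: hive.newdb.newname

        // Pre-existing three-arg fromString, as used at the patched call sites
        // (e.g. TableName.fromString(tableName, null, dbName)): the catalog stays
        // blank, and the patched toString() omits blank parts instead of printing
        // "null." prefixes. This reproduces the old cache key
        // dbName.toLowerCase() + "." + tableName.toLowerCase().
        TableName dbAndTable = TableName.fromString("t1", null, "default");
        System.out.println(dbAndTable);                   // prints: default.t1

        // New two-arg overload added by this patch; equivalent to passing a
        // null default catalog above.
        System.out.println(TableName.fromString("t1", "default"));   // prints: default.t1

        // Table-only name: both catalog and db may be blank.
        System.out.println(TableName.fromString("t1", null, null));  // prints: t1

        // Rejected by the patched constructor: a blank db with a non-blank catalog.
        try {
          new TableName("hive", null, "t1");
        } catch (IllegalArgumentException expected) {
          System.out.println("rejected: " + expected.getMessage());
        }
      }
    }

If this matches the intent, a storage-api unit test asserting these toString() shapes may be worthwhile, since QueryResultsCache now keys its table-to-entry map on that string.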