diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
index 9b66e6be74..fa33c2f013 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
@@ -174,7 +174,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
       }
       try {
-        Table table = context.getHive().newTable(desc.getDbTableName());
+        Table table = new Table(desc.getTableName());
         if (desc.getLocation() != null) {
           table.setDataLocation(new Path(desc.getLocation()));
         }
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index cd54e28393..2b1f98a5ba 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
@@ -336,7 +337,7 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive
     } else if (ddlDesc instanceof AlterTableSetLocationDesc) {
       AlterTableSetLocationDesc alterTable = (AlterTableSetLocationDesc) ddlDesc;
       Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
-          Utilities.getDbTableName(alterTable.getDbTableName())[1], false);
+          HiveTableName.of(alterTable.getDbTableName()).getTable(), false);

       Partition part = null;
       if (alterTable.getPartitionSpec() != null) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
index 333db4db66..f14e6285cb 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
@@ -154,6 +154,7 @@ static public Deserializer getDeserializer(Configuration conf,
     ObjectInspector oi = deserializer.getObjectInspector();
     String[] names = tableName.split("\\.");
     String last_name = names[names.length - 1];
+    // names[0] = db, names[1] = table
     for (int i = 2; i < names.length; i++) {
       if (oi instanceof StructObjectInspector) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index a7343420cf..9c002933b2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -339,7 +339,7 @@ private void acquireLocks() throws CommandProcessorException {
           fsd1.getDirName().compareTo(fsd2.getDirName()));
       for (FileSinkDesc desc : acidSinks) {
         TableDesc tableInfo = desc.getTableInfo();
-        final TableName tn = HiveTableName.ofNullable(tableInfo.getTableName());
+        final TableName tn = HiveTableName.of(tableInfo.getTableName());
         long writeId = driverContext.getTxnManager().getTableWriteId(tn.getDb(), tn.getTable());
         desc.setTableWriteId(writeId);
diff --git ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
index 4b833b730c..98afa6d430 100644
--- ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
+++ ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java
@@ -52,6 +52,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -631,13 +632,13 @@ public long getSize() {
     }
   }

-  public void notifyTableChanged(String dbName, String tableName, long updateTime) {
-    LOG.debug("Table changed: {}.{}, at {}", dbName, tableName, updateTime);
+  public void notifyTableChanged(TableName tableName, long updateTime) {
+    LOG.debug("Table changed: {}, at {}", tableName, updateTime);
     // Invalidate all cache entries using this table.
     List<CacheEntry> entriesToInvalidate = null;
     rwLock.writeLock().lock();
     try {
-      String key = (dbName.toLowerCase() + "." + tableName.toLowerCase());
+      String key = tableName.toString();
       Set<CacheEntry> entriesForTable = tableToEntryMap.get(key);
       if (entriesForTable != null) {
         // Possible concurrent modification issues if we try to remove cache entries while
@@ -989,7 +990,7 @@ public void accept(NotificationEvent event) {
       QueryResultsCache cache = QueryResultsCache.getInstance();
       if (cache != null) {
         long eventTime = event.getEventTime() * 1000L;
-        cache.notifyTableChanged(dbName, tableName, eventTime);
+        cache.notifyTableChanged(TableName.fromString(tableName, dbName), eventTime);
       } else {
         LOG.debug("Cache not instantiated, skipping event on {}.{}", dbName, tableName);
       }
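Reviewer note: the cache-key change above replaces ad-hoc `db.toLowerCase() + "." + table.toLowerCase()` concatenation with the canonical string of a `TableName`. The sketch below is illustrative only; `CacheKey` is a hypothetical stand-in for Hive's `TableName`, written in plain Java so it runs standalone, and shows why normalizing once in a value object keeps every map lookup consistent.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

// Hypothetical stand-in for TableName: normalize once, in the constructor,
// so all call sites agree on the cache key.
final class CacheKey {
  private final String db;
  private final String table;

  CacheKey(String db, String table) {
    this.db = db.trim().toLowerCase();
    this.table = table.trim().toLowerCase();
  }

  @Override
  public String toString() { return db + "." + table; }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof CacheKey)) { return false; }
    CacheKey other = (CacheKey) o;
    return db.equals(other.db) && table.equals(other.table);
  }

  @Override
  public int hashCode() { return Objects.hash(db, table); }

  public static void main(String[] args) {
    Map<String, String> entries = new HashMap<>();
    entries.put(new CacheKey("Default", "T1").toString(), "cached result");
    // Different input casing still hits the same entry after normalization.
    System.out.println(entries.get(new CacheKey("default", "t1").toString()));
  }
}
```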
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
index c05d699bd8..9ca155138e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.misc.msck;

+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -33,6 +34,7 @@
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.thrift.TException;
@@ -54,22 +56,21 @@ public int execute() throws HiveException, IOException, TException {
       Msck msck = new Msck(false, false);
       msck.init(context.getDb().getConf());

-      String[] names = Utilities.getDbTableName(desc.getTableName());
+      TableName tName = HiveTableName.of(desc.getTableName());

       long partitionExpirySeconds = -1L;
       try (HiveMetaStoreClient msc = new HiveMetaStoreClient(context.getConf())) {
-        Table table = msc.getTable(SessionState.get().getCurrentCatalog(), names[0], names[1]);
-        String qualifiedTableName = Warehouse.getCatalogQualifiedTableName(table);
+        Table table = msc.getTable(tName);
         boolean msckEnablePartitionRetention = MetastoreConf.getBoolVar(context.getConf(),
             MetastoreConf.ConfVars.MSCK_REPAIR_ENABLE_PARTITION_RETENTION);
         if (msckEnablePartitionRetention) {
           partitionExpirySeconds = PartitionManagementTask.getRetentionPeriodInSeconds(table);
-          LOG.info("{} - Retention period ({}s) for partition is enabled for MSCK REPAIR..", qualifiedTableName,
+          LOG.info("{} - Retention period ({}s) for partition is enabled for MSCK REPAIR..", tName,
               partitionExpirySeconds);
         }
       }
-      MsckInfo msckInfo = new MsckInfo(SessionState.get().getCurrentCatalog(), names[0], names[1],
+      MsckInfo msckInfo = new MsckInfo(tName,
           desc.getPartitionsSpecs(), desc.getResFile(), desc.isRepairPartitions(),
           desc.isAddPartitions(), desc.isDropPartitions(), partitionExpirySeconds);
       return msck.repair(msckInfo);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
index 9e9d30f246..e301b80c6d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
@@ -63,7 +63,8 @@ public AlterTableType getType() {

   @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbTableName() {
-    return tableName.getNotEmptyDbTable();
+    // TODO: render the TableName directly in EXPLAIN output instead of converting to String
+    return tableName.toString();
   }

   @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -96,7 +97,7 @@ public EnvironmentContext getEnvironmentContext() {

   @Override
   public String getFullTableName() {
-    return tableName.getNotEmptyDbTable();
+    return tableName.toString();
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
index 323cdc42d3..deeffb6c45 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.session.SessionState;

 /**
@@ -146,8 +147,8 @@ private void finalizeAlterTableWithWriteIdOp(Table table, Table oldTable, List

     colNames = Lists.newArrayList(colName.toLowerCase());
-    String[] dbTab = Utilities.getDbTableName(desc.getDbTableName());
+    TableName tName = HiveTableName.of(desc.getDbTableName());
     if (null == part) {
       if (table.isPartitioned()) {
         Map<String, String> tableProps = table.getParameters() == null ?
@@ -204,20 +206,20 @@ private void getColumnDataColPathSpecified(Table table, Partition part, List
         List<String> partitions = new ArrayList<String>();
         partitions.add(part.getName());
         cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
-        List<ColumnStatisticsObj> partitionColStat = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(),
-            dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName());
+        List<ColumnStatisticsObj> partitionColStat = context.getDb().getPartitionColumnStatistics(tName.getDb(),
+            tName.getTable(), partitions, colNames, false).get(part.getName());
         if (partitionColStat != null) {
           colStats.addAll(partitionColStat);
         }
@@ -247,13 +249,13 @@ private void getColumnDataForPartitionKeyColumn(Table table, List
   }

   private void getColumnsForNotPartitionKeyColumn(List<FieldSchema> cols, List<ColumnStatisticsObj> colStats,
-      Deserializer deserializer, List<String> colNames, String[] dbTab, Map<String, String> tableProps)
+      Deserializer deserializer, List<String> colNames, TableName tName, Map<String, String> tableProps)
       throws HiveException {
     cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
-    List<String> parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(),
+    List<String> parts = context.getDb().getPartitionNames(tName.getDb(), tName.getTable(),
         (short) -1);
     AggrStats aggrStats = context.getDb().getAggrColStatsFor(
-        dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false);
+        tName.getDb(), tName.getTable(), colNames, parts, false);
     colStats.addAll(aggrStats.getColStats());
     if (parts.size() == aggrStats.getPartsFound()) {
       StatsSetupConst.setColumnStatsState(tableProps, colNames);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
index 091c146940..c831e0f825 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
@@ -33,17 +33,17 @@ public class AlterTableRenameDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;

-  private final String newName;
+  private final TableName newTableName;

-  public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView, String newName)
-      throws SemanticException {
+  public AlterTableRenameDesc(TableName tableName, ReplicationSpec replicationSpec, boolean expectView,
+      TableName newTableName) throws SemanticException {
     super(AlterTableType.RENAME, tableName, null, replicationSpec, false, expectView, null);
-    this.newName = newName;
+    this.newTableName = newTableName;
   }

   @Explain(displayName = "new table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getNewName() {
-    return newName;
+  public TableName getNewTableName() {
+    return newTableName;
   }

   @Override
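Reviewer note: both ends of a rename are now structured names rather than strings. A minimal sketch of the new constructor shape, assuming hive-exec on the classpath and using the two-argument `TableName.fromString(name, db)` overload this patch relies on throughout:

```java
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableRenameDesc;

public class RenameDescSketch {
  public static void main(String[] args) throws Exception {
    TableName source = TableName.fromString("old_tbl", "db1");
    TableName target = TableName.fromString("new_tbl", "db1");
    // null ReplicationSpec and expectView=false, matching the constructor changed above
    AlterTableRenameDesc desc = new AlterTableRenameDesc(source, null, false, target);
    System.out.println(desc.getNewTableName());
  }
}
```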
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
index 73ea400dcc..6a1473a25f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
@@ -37,8 +37,7 @@ public AlterTableRenameOperation(DDLOperationContext context, AlterTableRenameDe
   @Override
   public int execute() throws HiveException {
-    String[] names = Utilities.getDbTableName(desc.getDbTableName());
-    if (Utils.isBootstrapDumpInProgress(context.getDb(), names[0])) {
+    if (Utils.isBootstrapDumpInProgress(context.getDb(), HiveTableName.of(desc.getDbTableName()).getDb())) {
       LOG.error("DDLTask: Rename Table not allowed as bootstrap dump in progress");
       throw new HiveException("Rename Table: Not allowed as bootstrap dump in progress");
     }
@@ -48,6 +47,6 @@ public int execute() throws HiveException {

   @Override
   protected void doAlteration(Table table, Partition partition) throws HiveException {
-    HiveTableName.setFrom(desc.getNewName(), table);
+    HiveTableName.setFrom(desc.getNewTableName(), table);
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java
index 8d8ac20350..ece8d36019 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;

 /**
  * Operation process of touching a table.
@@ -55,7 +56,7 @@ public int execute() throws HiveException {
         throw new HiveException("Specified partition does not exist");
       }
       try {
-        context.getDb().alterPartition(table.getCatalogName(), table.getDbName(), table.getTableName(), part,
+        context.getDb().alterPartition(HiveTableName.of(table), part,
             environmentContext, true);
       } catch (InvalidOperationException e) {
         throw new HiveException(e);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java
index d1f36945fb..2b0de09ba8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/create/CreateViewDesc.java
@@ -26,6 +26,7 @@
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
@@ -36,6 +37,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -363,11 +365,9 @@ public ReplicationSpec getReplicationSpec() {
   }

   public Table toTable(HiveConf conf) throws HiveException {
-    String[] names = Utilities.getDbTableName(getViewName());
-    String databaseName = names[0];
-    String tableName = names[1];
+    TableName tableName = HiveTableName.of(getViewName());

-    Table tbl = new Table(databaseName, tableName);
+    Table tbl = new Table(tableName);
     tbl.setViewOriginalText(getViewOriginalText());
     tbl.setViewExpandedText(getViewExpandedText());
     if (isMaterialized()) {
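Reviewer note: `toTable` above relies on the new `Table(TableName)` constructor added in Table.java further down. A minimal usage sketch, assuming hive-exec on the classpath; note that `HiveTableName.of(String)` may consult the session state for defaults, so this is best run inside an initialized Hive session:

```java
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.HiveTableName;

public class TableCtorSketch {
  public static void main(String[] args) throws Exception {
    // HiveTableName.of parses "db.table"; the new Table(TableName) constructor
    // replaces the two-string constructor at call sites like toTable above.
    TableName name = HiveTableName.of("db1.my_view");
    Table tbl = new Table(name);
    System.out.println(tbl.getDbName() + "." + tbl.getTableName());
  }
}
```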
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index dc6d31a9cb..9c5f3fa460 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -84,8 +84,7 @@ private ColumnStatistics constructColumnStatsFromInput()
           colStats.getStatsDesc().getTableName());
       return colStats;
     }
-    String dbName = work.dbName();
-    String tableName = work.getTableName();
+    TableName tableName = work.getTableName();
     String partName = work.getPartName();
     String colName = work.getColName();
     String columnType = work.getColType();
@@ -291,7 +290,7 @@ private ColumnStatistics constructColumnStatsFromInput()
     } else {
       throw new SemanticException("Unsupported type");
     }
-    ColumnStatisticsDesc statsDesc = getColumnStatsDesc(dbName, tableName,
+    ColumnStatisticsDesc statsDesc = getColumnStatsDesc(tableName,
         partName, partName == null);
     ColumnStatistics colStat = new ColumnStatistics();
     colStat.setStatsDesc(statsDesc);
@@ -300,11 +299,10 @@ private ColumnStatistics constructColumnStatsFromInput()
     return colStat;
   }

-  private ColumnStatisticsDesc getColumnStatsDesc(String dbName,
-      String tableName, String partName, boolean isTblLevel) {
+  private ColumnStatisticsDesc getColumnStatsDesc(TableName tableName, String partName, boolean isTblLevel) {
     ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc();
-    statsDesc.setDbName(dbName);
-    statsDesc.setTableName(tableName);
+    statsDesc.setDbName(tableName.getDb());
+    statsDesc.setTableName(tableName.getTable());
     statsDesc.setIsTblLevel(isTblLevel);
     if (!isTblLevel) {
       statsDesc.setPartName(partName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index c1f94d165b..faad908b33 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -42,6 +42,7 @@
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -744,7 +745,7 @@ private JSONArray outputList(List l, PrintStream out, boolean hasHeader,

   private boolean isPrintable(Object val) {
     if (val instanceof Boolean || val instanceof String
         || val instanceof Integer || val instanceof Long || val instanceof Byte
-        || val instanceof Float || val instanceof Double || val instanceof Path) {
+        || val instanceof Float || val instanceof Double || val instanceof Path || val instanceof TableName) {
       return true;
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 51de87f2fd..dcb5d04e21 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
 import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol;
 import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
 import org.apache.hadoop.hive.ql.plan.LoadMultiFilesDesc;
@@ -833,8 +834,7 @@ private void updatePartitionBucketSortColumns(Hive db, Table table, Partition pa
     }

     if (updateBucketCols || updateSortCols) {
-      db.alterPartition(table.getCatalogName(), table.getDbName(), table.getTableName(),
-          partn, null, true);
+      db.alterPartition(HiveTableName.of(table), partn, null, true);
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index e9966e6364..afdd1eafe3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -2182,43 +2182,6 @@ public static String formatBinaryString(byte[] array, int start, int length) {
     return names;
   }

-  /**
-   * Extract db and table name from dbtable string, where db and table are separated by "."
-   * If there is no db name part, set the current sessions default db
-   * @param dbtable
-   * @return String array with two elements, first is db name, second is table name
-   * @throws SemanticException
-   * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead
-   */
-  @Deprecated
-  public static String[] getDbTableName(String dbtable) throws SemanticException {
-    return getDbTableName(SessionState.get().getCurrentDatabase(), dbtable);
-  }
-
-  /**
-   * Extract db and table name from dbtable string.
-   * @param defaultDb
-   * @param dbtable
-   * @return String array with two elements, first is db name, second is table name
-   * @throws SemanticException
-   * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead
-   */
-  @Deprecated
-  public static String[] getDbTableName(String defaultDb, String dbtable) throws SemanticException {
-    if (dbtable == null) {
-      return new String[2];
-    }
-    String[] names = dbtable.split("\\.");
-    switch (names.length) {
-    case 2:
-      return names;
-    case 1:
-      return new String [] {defaultDb, dbtable};
-    default:
-      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbtable);
-    }
-  }
-
   public static void validateColumnNames(List<String> colNames, List<String> checkCols)
       throws SemanticException {
     Iterator<String> checkColsIter = checkCols.iterator();
@@ -2239,44 +2202,6 @@ public static void validateColumnNames(List<String> colNames, List<String> checkCols
     }
   }

-  /**
-   * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
-   * {@link TableName}. All parts can be null.
-   *
-   * @param dbTableName
-   * @return a {@link TableName}
-   * @throws SemanticException
-   * @deprecated handle null values and use {@link TableName#fromString(String, String, String)}
-   */
-  @Deprecated
-  public static TableName getNullableTableName(String dbTableName) throws SemanticException {
-    return getNullableTableName(dbTableName, SessionState.get().getCurrentDatabase());
-  }
-
-  /**
-   * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
-   * {@link TableName}. All parts can be null.
-   *
-   * @param dbTableName
-   * @param defaultDb
-   * @return a {@link TableName}
-   * @throws SemanticException
-   * @deprecated handle null values and use {@link TableName#fromString(String, String, String)}
-   */
-  @Deprecated
-  public static TableName getNullableTableName(String dbTableName, String defaultDb) throws SemanticException {
-    if (dbTableName == null) {
-      return new TableName(null, null, null);
-    } else {
-      try {
-        return TableName
-            .fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb);
-      } catch (IllegalArgumentException e) {
-        throw new SemanticException(e.getCause());
-      }
-    }
-  }
-
   /**
    * Gets the default notification interval to send progress updates to the tracker. Useful for
    * operators that may not output data for a while.
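Reviewer note: with `Utilities.getDbTableName` removed above, call sites migrate from positional array slots to named accessors. A minimal before/after sketch, assuming hive-exec on the classpath and using the three-argument `TableName.fromString(name, catalog, db)` shown in the removed code:

```java
import org.apache.hadoop.hive.common.TableName;

public class MigrationSketch {
  public static void main(String[] args) {
    // before (removed above):
    //   String[] names = Utilities.getDbTableName("db1.t1");
    //   String db = names[0], table = names[1];
    // after: a typed name with named accessors instead of positional slots
    TableName name = TableName.fromString("db1.t1", null, "default");
    System.out.println(name.getDb());     // db1
    System.out.println(name.getTable());  // t1
  }
}
```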
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
index d7b6eeae55..65816ea978 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
@@ -250,7 +250,7 @@ private boolean shouldReplayEvent(FileStatus dir, DumpType dumpType, String dbNa
     HashMap<String, String> mapProp = new HashMap<>();
     mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState);

-    TableName tName = TableName.fromString(tableName, null, dbName);
+    TableName tName = TableName.fromString(tableName, dbName);
     AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec,
         new ReplicationSpec(replState, replState), false, mapProp, false, false, null);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
index fc7f226d77..970e11a909 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
@@ -147,7 +147,7 @@
     HashMap<String, String> mapProp = new HashMap<>();
     mapProp.put(REPL_CHECKPOINT_KEY, dumpRoot);

-    final TableName tName = TableName.fromString(tableDesc.getTableName(), null, tableDesc.getDatabaseName());
+    final TableName tName = TableName.fromString(tableDesc.getTableName(), tableDesc.getDatabaseName());
     AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec, null, false,
         mapProp, false, false, null);
     return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterTblDesc), conf);
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
index e7d8e55695..42615a6190 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;

 /**
  * Implementation of a pre execute hook that updates the access
@@ -76,7 +77,7 @@ public void run(HookContext hookContext) throws Exception {
         Table t = db.getTable(dbName, tblName);
         p = db.getPartition(t, p.getSpec(), false);
         p.setLastAccessTime(lastAccessTime);
-        db.alterPartition(null, dbName, tblName, p, null, false);
+        db.alterPartition(HiveTableName.of(t), p, null, false);
         t.setLastAccessTime(lastAccessTime);
         db.alterTable(dbName + "." + tblName, t, false, null, false);
         break;
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index dbbe6f1ec5..ee46f24e51 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -2442,7 +2442,7 @@ public static ValidWriteIdList getTableValidWriteIdListWithTxnList(
   }

   public static String getFullTableName(String dbName, String tableName) {
-    return TableName.fromString(tableName, null, dbName).getNotEmptyDbTable().toLowerCase();
+    return TableName.fromString(tableName, dbName).getNotEmptyDbTable();
   }

   /**
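Reviewer note: dropping the trailing `.toLowerCase()` in `AcidUtils.getFullTableName` above is only safe if `TableName` normalizes case internally; the patch appears to assume this, but it is worth making explicit. A minimal sketch of that assumption, assuming hive-exec on the classpath:

```java
import org.apache.hadoop.hive.common.TableName;

public class CaseSketch {
  public static void main(String[] args) {
    // The .toLowerCase() removed above is redundant only if TableName
    // lower-cases its parts; this sketch makes that assumption visible.
    String full = TableName.fromString("T1", null, "Db1").getNotEmptyDbTable();
    System.out.println(full); // expected: db1.t1 if TableName normalizes case
  }
}
```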
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index d40a67fcdc..6e01777671 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -192,6 +192,7 @@
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAugmentMaterializationRule;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
@@ -733,37 +734,28 @@ public void alterTable(Table newTbl, boolean cascade, EnvironmentContext environ
         newTbl.getTableName(), newTbl, cascade, environmentContext, transactional);
   }

-  /**
-   * Updates the existing table metadata with the new metadata.
-   *
-   * @param fullyQlfdTblName
-   *          name of the existing table
-   * @param newTbl
-   *          new name of the table. could be the old name
-   * @param transactional
-   *          Need to generate and save a table snapshot into the metastore?
-   * @throws HiveException
-   */
-  public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext,
+  public void alterTable(TableName tableName, Table newTbl, EnvironmentContext environmentContext,
       boolean transactional) throws HiveException {
-    String[] names = Utilities.getDbTableName(fullyQlfdTblName);
-    alterTable(null, names[0], names[1], newTbl, false, environmentContext, transactional);
+    alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, false, environmentContext, transactional);
   }

-  public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
+  public void alterTable(TableName tableName, Table newTbl, boolean cascade,
       EnvironmentContext environmentContext, boolean transactional) throws HiveException {
-    String[] names = Utilities.getDbTableName(fullyQlfdTblName);
-    alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional);
+    alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, cascade, environmentContext, transactional);
   }

   public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade,
-      EnvironmentContext environmentContext, boolean transactional, long writeId)
-      throws HiveException {
-    String[] names = Utilities.getDbTableName(fullyQlfdTblName);
-    alterTable(null, names[0], names[1], newTbl, cascade, environmentContext, transactional,
-        writeId);
+      EnvironmentContext environmentContext, boolean transactional)
+      throws HiveException {
+    TableName tableName = HiveTableName.of(fullyQlfdTblName);
+    alterTable(tableName, newTbl, cascade, environmentContext, transactional);
+  }
+
+  public void alterTable(TableName tableName, Table newTbl, boolean cascade,
+      EnvironmentContext environmentContext, boolean transactional, long writeId) throws HiveException {
+    alterTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), newTbl, cascade, environmentContext, transactional, writeId);
   }

   public void alterTable(String catName, String dbName, String tblName, Table newTbl, boolean cascade,
@@ -850,16 +842,13 @@ public void updateCreationMetadata(String dbName, String tableName, CreationMeta

   public void alterPartition(String tblName, Partition newPart,
       EnvironmentContext environmentContext, boolean transactional)
       throws InvalidOperationException, HiveException {
-    String[] names = Utilities.getDbTableName(tblName);
-    alterPartition(null, names[0], names[1], newPart, environmentContext, transactional);
+    alterPartition(HiveTableName.of(tblName), newPart, environmentContext, transactional);
   }

   /**
    * Updates the existing partition metadata with the new metadata.
    *
-   * @param dbName
-   *          name of the exiting table's database
-   * @param tblName
+   * @param tableName
    *          name of the existing table
    * @param newPart
    *          new partition
@@ -871,12 +860,12 @@ public void alterPartition(String tblName, Partition newPart,
    *           if the changes in metadata is not acceptable
    * @throws HiveException
    */
-  public void alterPartition(String catName, String dbName, String tblName, Partition newPart,
+  public void alterPartition(TableName tableName, Partition newPart,
       EnvironmentContext environmentContext, boolean transactional)
       throws InvalidOperationException, HiveException {
     try {
-      if (catName == null) {
-        catName = getDefaultCatalog(conf);
+      if (tableName.getCat() == null) {
+        tableName = HiveTableName.of(tableName.toString());
       }
       validatePartition(newPart);
       String location = newPart.getLocation();
@@ -893,11 +882,11 @@
         if (tableSnapshot != null) {
           newPart.getTPartition().setWriteId(tableSnapshot.getWriteId());
         } else {
-          LOG.warn("Cannot get a table snapshot for " + tblName);
+          LOG.warn("Cannot get a table snapshot for " + tableName.getTable());
         }
       }
-      getSynchronizedMSC().alter_partition(catName,
-          dbName, tblName, newPart.getTPartition(), environmentContext,
+      getSynchronizedMSC().alter_partition(tableName.getCat(),
+          tableName.getDb(), tableName.getTable(), newPart.getTPartition(), environmentContext,
           tableSnapshot == null ? null : tableSnapshot.getValidWriteIdList());
     } catch (MetaException e) {
@@ -928,10 +917,9 @@ private void validatePartition(Partition newPart) throws HiveException {
    *           if the changes in metadata is not acceptable
    * @throws HiveException
    */
-  public void alterPartitions(String tblName, List<Partition> newParts,
+  public void alterPartitions(TableName tblName, List<Partition> newParts,
       EnvironmentContext environmentContext, boolean transactional)
       throws InvalidOperationException, HiveException {
-    String[] names = Utilities.getDbTableName(tblName);
     List<org.apache.hadoop.hive.metastore.api.Partition> newTParts = new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
     try {
@@ -951,7 +939,7 @@ public void alterPartitions(String tblName, List<Partition> newParts,
       }
       newTParts.add(tmpPart.getTPartition());
     }
-      getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext,
+      getMSC().alter_partitions(tblName.getDb(), tblName.getTable(), newTParts, environmentContext,
           tableSnapshot != null ? tableSnapshot.getValidWriteIdList() : null,
           tableSnapshot != null ? tableSnapshot.getWriteId() : -1);
     } catch (MetaException e) {
@@ -1166,8 +1154,8 @@ public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
    *           thrown if the drop fails
    */
   public void dropTable(String tableName, boolean ifPurge) throws HiveException {
-    String[] names = Utilities.getDbTableName(tableName);
-    dropTable(names[0], names[1], true, true, ifPurge);
+    TableName tn = HiveTableName.of(tableName);
+    dropTable(tn.getDb(), tn.getTable(), true, true, ifPurge);
   }

   /**
@@ -1308,8 +1296,7 @@ public Table getTable(final String tableName) throws HiveException {
    *           table doesn't exist
    */
   public Table getTable(final String tableName, boolean throwException) throws HiveException {
-    String[] names = Utilities.getDbTableName(tableName);
-    return this.getTable(names[0], names[1], throwException);
+    return this.getTable(HiveTableName.of(tableName), throwException);
   }

   /**
@@ -1324,13 +1311,7 @@ public Table getTable(final String tableName, boolean throwException) throws Hiv
    *           if there's an internal error or if the table doesn't exist
    */
   public Table getTable(final String dbName, final String tableName) throws HiveException {
-    // TODO: catalog... etc everywhere
-    if (tableName.contains(".")) {
-      String[] names = Utilities.getDbTableName(tableName);
-      return this.getTable(names[0], names[1], true);
-    } else {
-      return this.getTable(dbName, tableName, true);
-    }
+    return this.getTable(TableName.fromString(tableName, dbName));
   }

   /**
@@ -1343,8 +1324,23 @@ public Table getTable(final String dbName, final String tableName) throws HiveEx
    *           if there's an internal error or if the table doesn't exist
    */
   public Table getTable(TableName tableName) throws HiveException {
+    return getTable(tableName, true);
+  }
+
+  /**
+   * Returns metadata of the table.
+   *
+   * @param tableName
+   *          the tableName object
+   * @param throwException
+   *          controls whether an exception is thrown or null is returned when the table is missing
+   * @return the table
+   * @exception HiveException
+   *              if there's an internal error or if the table doesn't exist
+   */
+  public Table getTable(TableName tableName, boolean throwException) throws HiveException {
     return this.getTable(ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()),
-        tableName.getTable(), true);
+        tableName.getTable(), throwException);
   }

   /**
@@ -3367,7 +3363,7 @@ private void alterPartitionSpec(Table tbl,
       String partPath) throws HiveException, InvalidOperationException {
     alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath);
-    alterPartition(tbl.getCatalogName(), tbl.getDbName(), tbl.getTableName(),
+    alterPartition(HiveTableName.of(tbl),
         new Partition(tbl, tpart), null, true);
   }

@@ -3569,11 +3565,6 @@ public boolean dropPartition(String dbName, String tableName, List<String> partVals
     }
   }

-  public List<String> getPartitionNames(String tblName, short max) throws HiveException {
-    String[] names = Utilities.getDbTableName(tblName);
-    return getPartitionNames(names[0], names[1], max);
-  }
-
   public List<String> getPartitionNames(String dbName, String tblName, short max)
       throws HiveException {
     List<String> names = null;
@@ -5247,8 +5238,7 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName,
   }

   public Table newTable(String tableName) throws HiveException {
-    String[] names = Utilities.getDbTableName(tableName);
-    return new Table(names[0], names[1]);
+    return new Table(HiveTableName.of(tableName));
   }

   public String getDelegationToken(String owner, String renewer)
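Reviewer note: the new `alterPartition(TableName, Partition, EnvironmentContext, boolean)` overload collapses the old `(catName, dbName, tblName)` triple into one argument. A shape-of-the-call sketch only; it is not runnable as a `main` because it requires a live metastore connection:

```java
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.HiveTableName;

public class AlterPartitionSketch {
  // Requires a connected Hive/metastore instance; shown for the call shape only.
  static void touch(Hive db, Table t, Partition p) throws Exception {
    // one TableName argument replaces the old (catName, dbName, tblName) triple
    db.alterPartition(HiveTableName.of(t), p, null, true);
  }
}
```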
ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 3dcf876af3..811f5ee4e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.io.HdfsUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
@@ -233,6 +234,11 @@ public void truncateTable(String dbName, String tableName,
     }
   }

+  @Override
+  public org.apache.hadoop.hive.metastore.api.Table getTable(TableName tableName) throws TException {
+    return getTable(tableName.getCat(), tableName.getDb(), tableName.getTable());
+  }
+
   @Override
   public List<String> getAllTables(String dbName) throws MetaException {
     List<String> tableNames = super.getAllTables(dbName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index b9bb3abbe5..42a4098078 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -37,6 +37,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
@@ -157,6 +158,10 @@ public Table(String databaseName, String tableName) {
     this(getEmptyTable(databaseName, tableName));
   }

+  public Table(TableName tableName) {
+    this(getEmptyTable(tableName.getDb(), tableName.getTable()));
+  }
+
  /** This api is used by getMetaData which require deep copy of metastore.api.table
   * and constraints copy
   */
@@ -784,6 +789,17 @@ public void setFields(List<FieldSchema> fields) {
     tTable.getSd().setCols(fields);
   }

+  /**
+   * Sets the table's db and table names based on a {@link TableName} object.
+   * @param tableName the tableName object
+   * @return this
+   */
+  public Table setFrom(TableName tableName) {
+    this.setDbName(tableName.getDb());
+    this.setTableName(tableName.getTable());
+    return this;
+  }
+
   public void setNumBuckets(int nb) {
     tTable.getSd().setNumBuckets(nb);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
index 9fc0416edb..59e13ed4df 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
@@ -20,6 +20,7 @@
 import java.util.HashMap;
 import java.util.Map;

+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -47,9 +48,8 @@ public static boolean isPartitionLevelStats(ASTNode tree) {

   public static Table getTable(ASTNode tree, BaseSemanticAnalyzer sa) throws SemanticException {
     String tableName = ColumnStatsSemanticAnalyzer.getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
-    String currentDb = SessionState.get().getCurrentDatabase();
-    String [] names = Utilities.getDbTableName(currentDb, tableName);
-    return sa.getTable(names[0], names[1], true);
+    TableName tName = HiveTableName.of(tableName);
+    return sa.getTable(tName);
   }

   public static Map<String, String> getPartKeyValuePairsFromAST(Table tbl, ASTNode tree,
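Reviewer note: the new `Hive.getTable(TableName, boolean)` above resolves a missing db part against the session's current database via `ObjectUtils.firstNonNull`. A self-contained sketch of just that fallback, runnable with commons-lang3 on the classpath; the variable names are illustrative:

```java
import org.apache.commons.lang3.ObjectUtils;

public class FallbackSketch {
  public static void main(String[] args) {
    // Mirrors the fallback in the new Hive.getTable(TableName, boolean):
    // a null db part falls back to the session's current database.
    String dbFromName = null;      // e.g. a TableName parsed from the bare string "t1"
    String sessionDb = "default";  // e.g. SessionState.get().getCurrentDatabase()
    String effective = ObjectUtils.firstNonNull(dbFromName, sessionDb);
    System.out.println(effective); // default
  }
}
```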
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 46bb37a0c2..d5d4b43585 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -394,7 +394,6 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD
       // table node
       Map.Entry<String, String> dbTablePair = getDbTableNamePair(tableOrColumnNode);
       return TableName.fromString(dbTablePair.getValue(),
-          null,
           dbTablePair.getKey() == null ? currentDatabase : dbTablePair.getKey())
           .getNotEmptyDbTable();
     } else if (tokenType == HiveParser.StringLiteral) {
@@ -435,14 +434,14 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalo
         throw new SemanticException(ASTErrorUtils.getMsg(
             ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode));
       }
-      return HiveTableName.ofNullable(tableName, dbName);
+      return TableName.fromString(tableName, dbName);
     }
     final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
     if (tableName.contains(".")) {
       throw new SemanticException(ASTErrorUtils.getMsg(
           ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode));
     }
-    return HiveTableName.ofNullable(tableName);
+    return HiveTableName.of(tableName);
   }

   /**
@@ -1017,7 +1016,7 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit
       try {
         // get table metadata
-        tableName = HiveTableName.withNoDefault(getUnescapedName((ASTNode) ast.getChild(0)));
+        tableName = TableName.fromString(getUnescapedName((ASTNode) ast.getChild(0)), null);
         boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
         if (testMode) {
           tableName = TableName.fromString(String.join("", conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX),
@@ -1654,14 +1653,14 @@ protected Table getTable(String database, String tblName, boolean throwException
           : db.getTable(database, tblName, false);
     } catch (InvalidTableException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()), e);
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, database).getNotEmptyDbTable()), e);
     } catch (Exception e) {
       throw new SemanticException(e.getMessage(), e);
     }

     if (tab == null && throwException) {
       // getTable needs a refactor with all ~50 occurences
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()));
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, database).getNotEmptyDbTable()));
     }
     return tab;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index c313d8e8d0..25b5dc0aa1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -113,6 +113,7 @@
 import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.calcite.util.Pair;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -5203,18 +5204,15 @@ private QBParseInfo getQBParseInfo(QB qb) throws CalciteSemanticException {

   @Override
   protected Table getTableObjectByName(String tabName, boolean throwException) throws HiveException {
-    String[] names = Utilities.getDbTableName(tabName);
-    final String tableName = names[1];
-    final String dbName = names[0];
-    final String fullyQualName = dbName + "." + tableName;
-    if (!tabNameToTabObject.containsKey(fullyQualName)) {
-      Table table = db.getTable(dbName, tableName, throwException);
+    final TableName tName = TableName.fromString(tabName, null);
+    if (!tabNameToTabObject.containsKey(tName.toString())) {
+      Table table = db.getTable(tName, throwException);
       if (table != null) {
-        tabNameToTabObject.put(fullyQualName, table);
+        tabNameToTabObject.put(tName.toString(), table);
       }
       return table;
     }
-    return tabNameToTabObject.get(fullyQualName);
+    return tabNameToTabObject.get(tName.toString());
   }

   /**
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index ba019c7553..8a66e8a148 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -253,7 +253,7 @@ private void analyzeAlterTableUpdateStats(ASTNode ast, TableName tblName, Map par
       // doesn't have enabled constraint since constraints are disallowed with such tables
       else if (entry.getKey().equals("external") && entry.getValue().equals("true")) {
         if (hasConstraintsEnabled(tableName.getTable())) {
-          throw new SemanticException(
-              ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Table: " + tableName.getDbTable() + " has constraints enabled."
+          throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
+              "Table: " + tableName.getNotEmptyDbTable() + " has constraints enabled. "
               + "Please remove those constraints to change this property."));
         }
       }
@@ -775,7 +781,7 @@ private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expe
       throws SemanticException {
     final TableName target = getQualifiedTableName((ASTNode) ast.getChild(0));

-    AlterTableRenameDesc alterTblDesc = new AlterTableRenameDesc(source, null, expectView, target.getDbTable());
+    AlterTableRenameDesc alterTblDesc = new AlterTableRenameDesc(source, null, expectView, target);
     Table table = getTable(source.getDbTable(), true);
     if (AcidUtils.isTransactionalTable(table)) {
       setAcidDdlDesc(alterTblDesc);
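Reviewer note: malformed names surface as `SemanticException` because the parse helpers rethrow `TableName`'s `IllegalArgumentException`, as the HiveTableName.java changes below show. A sketch of that behavior, assuming the underlying `TableName` parser rejects more than catalog.db.table parts and that this runs inside an initialized session:

```java
import org.apache.hadoop.hive.ql.parse.HiveTableName;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class InvalidNameSketch {
  public static void main(String[] args) {
    try {
      // Four dotted parts exceed catalog.db.table; expected to be rejected.
      HiveTableName.of("a.b.c.d");
    } catch (SemanticException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
```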
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java
index cd9f88c53b..6da876a250 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveTableName.java
@@ -38,37 +38,22 @@ public HiveTableName(String catName, String dbName, String tableName) {
    * @throws SemanticException
    */
   public static TableName of(Table table) throws SemanticException {
-    return ofNullable(table.getTableName(), table.getDbName());
+    return ofNullable(table.getTableName(), table.getDbName()); // FIXME: this shouldn't call ofNullable
   }

   /**
-   * Set a @{@link Table} object's table and db names based on the provided string.
-   * @param dbTable the dbtable string
+   * Set a @{@link Table} object's table and db names based on the provided tableName object.
+   * @param tableName the tableName object
    * @param table the table to update
    * @return the table
    * @throws SemanticException
    */
-  public static Table setFrom(String dbTable, Table table) throws SemanticException {
-    TableName name = ofNullable(dbTable);
-    table.setTableName(name.getTable());
-    table.setDbName(name.getDb());
+  public static Table setFrom(TableName tableName, Table table) throws SemanticException {
+    table.setTableName(tableName.getTable());
+    table.setDbName(tableName.getDb());
     return table;
   }

-  /**
-   * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
-   * {@link TableName}. All parts can be null.
-   *
-   * @param dbTableName
-   * @return a {@link TableName}
-   * @throws SemanticException
-   * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)}
-   */
-  // to be @Deprecated
-  public static TableName ofNullable(String dbTableName) throws SemanticException {
-    return ofNullable(dbTableName, SessionState.get().getCurrentDatabase());
-  }
-
   /**
    * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
    * {@link TableName}. All parts can be null. This method won't try to find the default db based on the session state.
@@ -93,37 +78,21 @@ public static TableName ofNullableWithNoDefault(String dbTableName) throws Seman
    * @throws SemanticException
    * @deprecated use {@link #of(String)} or {@link #fromString(String, String, String)}
    */
-  // to be @Deprecated
-  public static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException {
+  @Deprecated
+  private static TableName ofNullable(String dbTableName, String defaultDb) throws SemanticException { // TODO: remove
     if (dbTableName == null) {
       return new TableName(null, null, null);
     } else {
       try {
-        return fromString(dbTableName, SessionState.get().getCurrentCatalog(), defaultDb);
+        // if the db is null, so should the catalog be; a workaround while ofNullable exists at all
+        final String cat = defaultDb == null || defaultDb.trim().isEmpty() ? null : SessionState.get().getCurrentCatalog();
+        return fromString(dbTableName, cat, defaultDb);
       } catch (IllegalArgumentException e) {
         throw new SemanticException(e);
       }
     }
   }

-  /**
-   * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
-   * {@link TableName}. This method won't try to find the default db/catalog based on the session state.
-   *
-   * @param dbTableName not null
-   * @return a {@link TableName}
-   * @throws SemanticException if dbTableName is null
-   * @deprecated use {@link #of(String)} instead and use the default db/catalog.
-   */
-  // to be @Deprecated
-  public static TableName withNoDefault(String dbTableName) throws SemanticException {
-    try {
-      return fromString(dbTableName, null, null);
-    } catch (IllegalArgumentException e) {
-      throw new SemanticException(e);
-    }
-  }
-
   /**
    * Accepts qualified name which is in the form of table, dbname.tablename or catalog.dbname.tablename and returns a
    * {@link TableName}.
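Reviewer note: `setFrom` now copies both parts straight off an already-parsed `TableName`, with no re-parsing, which is what `AlterTableRenameOperation.doAlteration` above relies on. A minimal usage sketch, assuming hive-exec on the classpath:

```java
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.HiveTableName;

public class SetFromSketch {
  public static void main(String[] args) throws Exception {
    Table t = new Table("db1", "old_name");
    // copies db and table names off the TableName; no string re-parsing
    HiveTableName.setFrom(TableName.fromString("new_name", "db1"), t);
    System.out.println(t.getDbName() + "." + t.getTableName()); // db1.new_name
  }
}
```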
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index dd97f3d7b6..9af82d0287 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -328,7 +328,7 @@ public static boolean prepareImport(boolean isImportCmd,
     }

     if (StringUtils.isNotBlank(parsedTableName)) {
-      tblDesc.setTableName(TableName.fromString(parsedTableName, null, dbname));
+      tblDesc.setTableName(TableName.fromString(parsedTableName, dbname));
     }

     if (tblDesc.getTableName() == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index fed890f031..94b9c7a078 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2307,9 +2307,9 @@ private void getMetaData(QB qb, ReadEntity parentInput)
           // Whether we are using an acid compliant transaction manager has already been caught in
           // UpdateDeleteSemanticAnalyzer, so if we are updating or deleting and getting nonAcid
           // here, it means the table itself doesn't support it.
-          throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().getTable());
+          throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TABLE, ts.getTableName().toString());
         } else {
-          throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().getTable());
+          throw new SemanticException(ErrorMsg.ACID_OP_ON_INSERTONLYTRAN_TABLE, ts.getTableName().toString());
         }
       }
       // TableSpec ts is got from the query (user specified),
@@ -2350,14 +2350,13 @@ private void getMetaData(QB qb, ReadEntity parentInput)
             location = new Path(qb.getTableDesc().getLocation());
           } else {
             // allocate a temporary output dir on the location of the table
-            String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-            String[] names = Utilities.getDbTableName(tableName);
+            TableName tableName = HiveTableName.of(getUnescapedName((ASTNode) ast.getChild(0)));
             try {
               Warehouse wh = new Warehouse(conf);
               //Use destination table's db location.
               String destTableDb = qb.getTableDesc() != null ? qb.getTableDesc().getDatabaseName() : null;
               if (destTableDb == null) {
-                destTableDb = names[0];
+                destTableDb = tableName.getDb();
               }
               location = wh.getDatabasePath(db.getDatabase(destTableDb));
             } catch (MetaException e) {
@@ -7076,7 +7075,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L
   }

   private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException {
-    TableName qTableName = HiveTableName.ofNullable(tableName, dbName);
+    TableName qTableName = TableName.fromString(tableName, dbName);
     Map<String, String> mapProp = new HashMap<>();
     mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null);
     AlterTableUnsetPropertiesDesc alterTblDesc = new AlterTableUnsetPropertiesDesc(qTableName, null, null, false,
@@ -7658,7 +7657,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input)
       fileSinkColInfos = new ArrayList<>();
       destTableIsTemporary = tblDesc.isTemporary();
       destTableIsMaterialization = tblDesc.isMaterialization();
-      tableName = TableName.fromString(tblDesc.getDbTableName(), null, tblDesc.getDatabaseName());
+      tableName = tblDesc.getTableName();
       tblProps = tblDesc.getTblProps();
     } else if (viewDesc != null) {
       fieldSchemas = new ArrayList<>();
@@ -8256,7 +8255,7 @@ private void handleLineage(LoadTableDesc ltd, Operator output)
     } else if (
         queryState.getCommandType().equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) {
       Path tlocation = null;
-      String tName = Utilities.getDbTableName(tableDesc.getDbTableName())[1];
+      String tName = tableDesc.getTableName().getTable();
       try {
         Warehouse wh = new Warehouse(conf);
         tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()),
@@ -12894,7 +12893,8 @@ protected void saveViewDefinition() throws SemanticException {
         sb.append(" FROM (");
         sb.append(expandedText);
         sb.append(") ");
-        sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf));
+        final String viewName = HiveTableName.of(createVwDesc.getViewName()).getTable();
+        sb.append(HiveUtils.unparseIdentifier(viewName, conf));
         expandedText = sb.toString();
       }
     } else {
@@ -12928,7 +12928,8 @@ protected void saveViewDefinition() throws SemanticException {
       sb.append(" FROM (");
       sb.append(expandedText);
       sb.append(") ");
-      sb.append(HiveUtils.unparseIdentifier(Utilities.getDbTableName(createVwDesc.getViewName())[1], conf));
+      final String viewName = HiveTableName.of(createVwDesc.getViewName()).getTable();
+      sb.append(HiveUtils.unparseIdentifier(viewName, conf));
       expandedText = sb.toString();
     }
@@ -13452,7 +13453,6 @@ private boolean hasConstraints(final List partCols, final List
     cols = new ArrayList();
@@ -13490,7 +13490,7 @@ ASTNode analyzeCreateTable(
     RowFormatParams rowFormatParams = new RowFormatParams();
     StorageFormat storageFormat = new StorageFormat(conf);

-    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
+    LOG.info("Creating table " + qualifiedTabName + " position=" + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();

     /*
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); CreateTableDesc crtTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, @@ -13714,12 +13713,11 @@ ASTNode analyzeCreateTable( case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { throw new SemanticException( - qualifiedTabName.getTable() + " cannot be declared transactional because it's an external table"); + qualifiedTabName.getNotEmptyDbTable() + " cannot be declared transactional because it's an external table"); } - tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, - isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, false, tblProps); + tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, + isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, false, tblProps); CreateTableDesc crtTranTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, @@ -13740,9 +13738,8 @@ ASTNode analyzeCreateTable( case CTLT: // create table like tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps, isExt, storageFormat, qualifiedTabName.toString(), sortCols, isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); Table likeTable = getTable(likeTableName, false); if (likeTable != null) { @@ -13754,7 +13751,7 @@ ASTNode analyzeCreateTable( updateDefaultTblProps(likeTable.getParameters(), tblProps, null); } } - CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, + CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(qualifiedTabName.toString(), isExt, isTemporary, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat); @@ -13781,9 +13778,9 @@ ASTNode analyzeCreateTable( // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTab); + Table dumpTable = new Table(qualifiedTabName); if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab)); + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(qualifiedTabName.toString())); } } catch (HiveException e) { throw new SemanticException(e); @@ -13825,9 +13822,8 @@ ASTNode analyzeCreateTable( } tblProps = validateAndAddDefaultProperties( - tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional); - addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()}, - TableType.MANAGED_TABLE, isTemporary, tblProps); + tblProps, isExt, 
storageFormat, qualifiedTabName.toString(), sortCols, isMaterialization, isTemporary, isTransactional); + addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps); tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, @@ -13850,12 +13846,12 @@ ASTNode analyzeCreateTable( } /** Adds entities for create table/create view. */ - private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, + private void addDbAndTabToOutputs(TableName tableName, TableType type, boolean isTemporary, Map tblProps) throws SemanticException { - Database database = getDatabase(qualifiedTabName[0]); + Database database = getDatabase(tableName.getDb()); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED)); - Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]); + Table t = new Table(tableName); t.setParameters(tblProps); t.setTableType(type); t.setTemporary(isTemporary); @@ -13864,7 +13860,6 @@ private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException { TableName qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); - final String dbDotTable = qualTabName.getNotEmptyDbTable(); List cols = null; boolean ifNotExists = false; boolean rewriteEnabled = true; @@ -13882,7 +13877,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating view " + dbDotTable + " position=" + LOG.info("Creating view " + qualTabName + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { @@ -13964,9 +13959,9 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt // Verify that the table does not already exist // dumpTable is only used to check the conflict for non-temporary tables try { - Table dumpTable = db.newTable(dbDotTable); + Table dumpTable = new Table(qualTabName); if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTable)); + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(qualTabName.toString())); } } catch (HiveException e) { throw new SemanticException(e); @@ -14007,26 +14002,24 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt if (tblProps == null) { tblProps = new HashMap<>(); } - tblProps = convertToAcidByDefault(storageFormat, dbDotTable, null, tblProps); + tblProps = convertToAcidByDefault(storageFormat, qualTabName.toString(), null, tblProps); } createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, sortColNames, distributeColNames, + qualTabName.toString(), cols, comment, tblProps, partColNames, sortColNames, distributeColNames, ifNotExists, isRebuild, rewriteEnabled, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); - addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, TableType.MATERIALIZED_VIEW, - false, 
tblProps); + addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); } else { createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, tblProps, partColNames, + qualTabName.toString(), cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createVwDesc))); - addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, - TableType.VIRTUAL_VIEW, false, tblProps); + addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } qb.setViewDesc(createVwDesc); @@ -15445,7 +15438,7 @@ protected String getFullTableNameForSQL(ASTNode n) throws SemanticException { switch (n.getType()) { case HiveParser.TOK_TABNAME: TableName tableName = getQualifiedTableName(n); - return HiveTableName.ofNullable(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), + return TableName.fromString(HiveUtils.unparseIdentifier(tableName.getTable(), this.conf), HiveUtils.unparseIdentifier(tableName.getDb(), this.conf)).getNotEmptyDbTable(); case HiveParser.TOK_TABREF: return getFullTableNameForSQL((ASTNode) n.getChild(0)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 2f3fc6c50a..e4d88927d6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -479,18 +480,19 @@ private Path getDefaultCtasLocation(final ParseContext pCtx) throws SemanticExce try { String protoName = null; boolean isExternal = false; + TableName tableName = null; if (pCtx.getQueryProperties().isCTAS()) { - protoName = pCtx.getCreateTable().getDbTableName(); + tableName = pCtx.getCreateTable().getTableName(); isExternal = pCtx.getCreateTable().isExternal(); } else if (pCtx.getQueryProperties().isMaterializedView()) { protoName = pCtx.getCreateViewDesc().getViewName(); + tableName = HiveTableName.of(protoName); } - String[] names = Utilities.getDbTableName(protoName); - if (!db.databaseExists(names[0])) { - throw new SemanticException("ERROR: The database " + names[0] + " does not exist."); + if (!db.databaseExists(tableName.getDb())) { + throw new SemanticException("ERROR: The database " + tableName.getDb() + " does not exist."); } Warehouse wh = new Warehouse(conf); - return wh.getDefaultTablePath(db.getDatabase(names[0]), names[1], isExternal); + return wh.getDefaultTablePath(db.getDatabase(tableName.getDb()), tableName.getTable(), isExternal); } catch (HiveException e) { throw new SemanticException(e); } catch (MetaException e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java index 97a1dd31a7..6d9dac8ea1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java +++ 
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 2f3fc6c50a..e4d88927d6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -479,18 +480,19 @@ private Path getDefaultCtasLocation(final ParseContext pCtx) throws SemanticExce
     try {
       String protoName = null;
       boolean isExternal = false;
+      TableName tableName = null;
       if (pCtx.getQueryProperties().isCTAS()) {
-        protoName = pCtx.getCreateTable().getDbTableName();
+        tableName = pCtx.getCreateTable().getTableName();
         isExternal = pCtx.getCreateTable().isExternal();
       } else if (pCtx.getQueryProperties().isMaterializedView()) {
         protoName = pCtx.getCreateViewDesc().getViewName();
+        tableName = HiveTableName.of(protoName);
       }
-      String[] names = Utilities.getDbTableName(protoName);
-      if (!db.databaseExists(names[0])) {
-        throw new SemanticException("ERROR: The database " + names[0] + " does not exist.");
+      if (!db.databaseExists(tableName.getDb())) {
+        throw new SemanticException("ERROR: The database " + tableName.getDb() + " does not exist.");
       }
       Warehouse wh = new Warehouse(conf);
-      return wh.getDefaultTablePath(db.getDatabase(names[0]), names[1], isExternal);
+      return wh.getDefaultTablePath(db.getDatabase(tableName.getDb()), tableName.getTable(), isExternal);
     } catch (HiveException e) {
       throw new SemanticException(e);
     } catch (MetaException e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
index 97a1dd31a7..6d9dac8ea1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
@@ -154,7 +154,7 @@ private void writeData(PartitionIterable partitions) throws SemanticException {
     if (tableSpec.tableHandle.isPartitioned()) {
       if (partitions == null) {
         throw new IllegalStateException("partitions cannot be null for partitionTable :"
-            + tableSpec.getTableName().getTable());
+            + tableSpec.getTableName().getNotEmptyDbTable());
       }
       new PartitionExport(paths, partitions, distCpDoAsUser, conf, mmCtx).write(replicationSpec);
     } else {
@@ -316,7 +316,7 @@ public AuthEntities getAuthEntities() throws SemanticException {
       if (tableSpec.tableHandle.isPartitioned()) {
         if (partitions == null) {
           throw new IllegalStateException("partitions cannot be null for partitionTable :"
-              + tableSpec.getTableName().getTable());
+              + tableSpec.getTableName().getNotEmptyDbTable());
         }
         for (Partition partition : partitions) {
           authEntities.inputs.add(new ReadEntity(partition));
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
index 6f98373c9e..2602b231c6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
@@ -56,7 +56,7 @@
     final String actualDbName = context.isDbNameEmpty() ? fks.get(0).getFktable_db() : context.dbName;
     final String actualTblName = fks.get(0).getFktable_name();
-    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
+    final TableName tName = TableName.fromString(actualTblName, actualDbName);

     for (SQLForeignKey fk : fks) {
       // If parent table is in the same database, change it to the actual db on destination
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
index 995c5d2f84..01458cd207 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
@@ -56,7 +56,7 @@
     final String actualDbName = context.isDbNameEmpty() ? nns.get(0).getTable_db() : context.dbName;
     final String actualTblName = nns.get(0).getTable_name();
-    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
+    final TableName tName = TableName.fromString(actualTblName, actualDbName);

     for (SQLNotNullConstraint nn : nns) {
       nn.setTable_db(actualDbName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
index f6decc27fc..d057e7ba21 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
@@ -56,7 +56,7 @@
     final String actualDbName = context.isDbNameEmpty() ? pks.get(0).getTable_db() : context.dbName;
     final String actualTblName = pks.get(0).getTable_name();
-    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
+    final TableName tName = TableName.fromString(actualTblName, actualDbName);

     for (SQLPrimaryKey pk : pks) {
       pk.setTable_db(actualDbName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
index e1c1d3a180..3d5ffdb55e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
@@ -56,7 +56,7 @@
     final String actualDbName = context.isDbNameEmpty() ? uks.get(0).getTable_db() : context.dbName;
     final String actualTblName = uks.get(0).getTable_name();
-    final TableName tName = TableName.fromString(actualTblName, null, actualDbName);
+    final TableName tName = TableName.fromString(actualTblName, actualDbName);

     for (SQLUniqueConstraint uk : uks) {
       uk.setTable_db(actualDbName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
index 34d3b00500..ec2aa6938a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
@@ -23,10 +23,8 @@
 import org.apache.hadoop.hive.ql.ddl.table.constraint.drop.AlterTableDropConstraintDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;

-import java.io.Serializable;
 import java.util.Collections;
 import java.util.List;

@@ -37,7 +35,7 @@
     DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
     final String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
     final String actualTblName = msg.getTable();
-    final TableName tName = HiveTableName.ofNullable(actualTblName, actualDbName);
+    final TableName tName = TableName.fromString(actualTblName, actualDbName);
     String constraintName = msg.getConstraint();

     AlterTableDropConstraintDesc dropConstraintsDesc =
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
index 066549d9cd..df7342d605 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.parse.repl.load.message;

+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.metastore.messaging.DropPartitionMessage;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.partition.drop.AlterTableDropPartitionDesc;
@@ -24,11 +25,9 @@
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

-import java.io.Serializable;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -45,7 +44,7 @@
           ReplUtils.genPartSpecs(new Table(msg.getTableObj()), msg.getPartitions());
       if (partSpecs.size() > 0) {
         AlterTableDropPartitionDesc dropPtnDesc =
-            new AlterTableDropPartitionDesc(HiveTableName.ofNullable(actualTblName, actualDbName), partSpecs, true,
+            new AlterTableDropPartitionDesc(TableName.fromString(actualTblName, null, actualDbName), partSpecs, true,
                context.eventOnlyReplicationSpec());
         Task<?> dropPtnTask = TaskFactory.get(
             new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
index 82e50ff442..fd5c68f9a6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
@@ -55,14 +55,14 @@
         }
       }

-      TableName oldName = TableName.fromString(tableObjBefore.getTableName(), null, oldDbName);
-      TableName newName = TableName.fromString(tableObjAfter.getTableName(), null, newDbName);
+      TableName oldName = TableName.fromString(tableObjBefore.getTableName(), oldDbName);
+      TableName newName = TableName.fromString(tableObjAfter.getTableName(), newDbName);
       ReplicationSpec replicationSpec = context.eventOnlyReplicationSpec();
       if (ReplUtils.isTableMigratingToTransactional(context.hiveConf, tableObjAfter)) {
         replicationSpec.setMigratingToTxnTable();
       }
       AlterTableRenameDesc renameTableDesc =
-          new AlterTableRenameDesc(oldName, replicationSpec, false, newName.getNotEmptyDbTable());
+          new AlterTableRenameDesc(oldName, replicationSpec, false, newName);
       renameTableDesc.setWriteId(msg.getWriteId());
       Task<?> renameTableTask = TaskFactory.get(
           new DDLWork(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf);
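[Editor's note — not part of the patch] Every replication handler above resolves the event's table against a fallback database, and the patch standardizes on the two-argument TableName.fromString(name, db) for that. A small sketch of the resolution behaviour implied by the call sites; the fallback semantics in the comments are inferred from the surrounding code ("context.isDbNameEmpty() ? msg.getDB() : context.dbName"), and the demo class is hypothetical:

// ---------------------------------------------------------------------------
import org.apache.hadoop.hive.common.TableName;

public class ReplHandlerNameDemo {
  public static void main(String[] args) throws Exception {
    // Qualified input: the embedded database should win over the fallback.
    TableName qualified = TableName.fromString("srcdb.tab1", "fallbackdb");
    System.out.println(qualified.getDb() + "." + qualified.getTable()); // assumed: srcdb.tab1

    // Unqualified input: the supplied database fills the missing qualifier.
    TableName unqualified = TableName.fromString("tab1", "fallbackdb");
    System.out.println(unqualified.getDb() + "." + unqualified.getTable()); // assumed: fallbackdb.tab1
  }
}
// ---------------------------------------------------------------------------

Note that DropPartitionHandler above still calls the three-argument overload with an explicit null catalog; whether that is intentional or an oversight is not visible from the diff itself.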
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
index 25e524af37..63cdae9ebe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
@@ -37,7 +37,7 @@
   @Override
   public List<Task<?>> handle(Context context) throws SemanticException {
     AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
-    final TableName tName = TableName.fromString(msg.getTable(), null,
+    final TableName tName = TableName.fromString(msg.getTable(),
        context.isDbNameEmpty() ? msg.getDB() : context.dbName);

     Map<String, String> partSpec = new LinkedHashMap<>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
index 35b8e0e684..103351a48b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
@@ -33,7 +33,7 @@
   @Override
   public List<Task<?>> handle(Context context) throws SemanticException {
     AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
-    final TableName tName = TableName.fromString(msg.getTable(), null,
+    final TableName tName = TableName.fromString(msg.getTable(),
        context.isDbNameEmpty() ? msg.getDB() : context.dbName);

     TruncateTableDesc truncateTableDesc = new TruncateTableDesc(tName, null, context.eventOnlyReplicationSpec());
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
index 20f7d2e0e4..40bff112c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
@@ -197,7 +197,7 @@ public String getTableName() {
     } else if (work.getTableSpecs() != null) {
       return work.getTableSpecs().getTableName().getTable();
     } else if (getLoadFileDesc().getCtasCreateTableDesc() != null) {
-      return getLoadFileDesc().getCtasCreateTableDesc().getDbTableName();
+      return getLoadFileDesc().getCtasCreateTableDesc().getTableName().toString();
     } else {
       return getLoadFileDesc().getCreateViewDesc().getViewName();
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
index c90ea437f5..c79f4971da 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 import java.util.Map;

+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -39,8 +40,7 @@
   private static final long serialVersionUID = 1L;
   private final String partName;
   private final Map<String, String> mapProp;
-  private final String dbName;
-  private final String tableName;
+  private final TableName tableName;
   private final String colName;
   private final String colType;
   private final ColumnStatistics colStats;
@@ -50,13 +50,11 @@
   public ColumnStatsUpdateWork(String partName,
      Map<String, String> mapProp,
-     String dbName,
-     String tableName,
+     TableName tableName,
      String colName,
      String colType) {
     this.partName = partName;
     this.mapProp = mapProp;
-    this.dbName = dbName;
     this.tableName = tableName;
     this.colName = colName;
     this.colType = colType;
@@ -69,8 +67,7 @@ public ColumnStatsUpdateWork(ColumnStatistics colStats, boolean isMigratingToTxn
     this.isMigratingToTxn = isMigratingToTxn;
     this.partName = null;
     this.mapProp = null;
-    this.dbName = null;
-    this.tableName = null;
+    this.tableName = null; // FIXME: This won't do
     this.colName = null;
     this.colType = null;
   }
@@ -88,11 +85,7 @@ public String getPartName() {
     return mapProp;
   }

-  public String dbName() {
-    return dbName;
-  }
-
-  public String getTableName() {
+  public TableName getTableName() {
     return tableName;
   }
@@ -117,7 +110,7 @@ public void setWriteId(long writeId) {
   @Override
   public String getFullTableName() {
-    return dbName + "." + tableName;
+    return tableName.getNotEmptyDbTable();
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
index dc7040e388..b441b44309 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
@@ -37,7 +37,6 @@
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;

@@ -56,7 +55,7 @@ public ImportTableDesc(String dbName, Table table) throws Exception {
     this.dbName = dbName;
     this.table = table;
-    final TableName tableName = HiveTableName.ofNullable(table.getTableName(), dbName);
+    final TableName tableName = TableName.fromString(table.getTableName(), dbName);

     switch (getDescType()) {
     case TABLE:
@@ -213,7 +212,7 @@ public String getTableName() throws SemanticException {
     case TABLE:
       return createTblDesc.getTableName().getTable();
     case VIEW:
-      return TableName.fromString(createViewDesc.getViewName(), null, null).getTable();
+      return TableName.fromString(createViewDesc.getViewName(), null).getTable();
     }
     return null;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 980f39b681..95f256c0d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -370,9 +370,9 @@ public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,
          crtTblDesc.getNullFormat());
     }

-    if (crtTblDesc.getDbTableName() != null && crtTblDesc.getDatabaseName() != null) {
+    if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) {
       properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
-          crtTblDesc.getDbTableName());
+          crtTblDesc.getTableName().getNotEmptyDbTable());
     }

     if (crtTblDesc.getTblProps() != null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
index 537b9de5db..cc53684638 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
@@ -20,11 +20,13 @@
 import java.util.ArrayList;
 import java.util.List;

+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
@@ -64,7 +66,8 @@ public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjec
       dbTable = new String[] {null, null};
     } else {
       if (privSubjectDesc.getTable()) {
-        dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
+        final TableName tn = HiveTableName.of(privSubjectDesc.getObject());
+        dbTable = new String[] {tn.getDb(), tn.getTable()};
       } else {
         dbTable = new String[] {privSubjectDesc.getObject(), null};
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java
index 53b3065a88..8bd9280a2b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsNoJobTask.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -46,6 +47,7 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.plan.BasicStatsNoJobWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -314,7 +316,7 @@ private int aggregateStats(ExecutorService threadPool, Hive db) {
   private int updatePartitions(Hive db, List<FooterStatCollector> scs, Table table)
      throws InvalidOperationException, HiveException {

-    String tableFullName = table.getFullyQualifiedName();
+    TableName tableName = HiveTableName.of(table);

     if (scs.isEmpty()) {
       return 0;
@@ -342,13 +344,13 @@ private int updatePartitions(Hive db, List<FooterStatCollector> scs, Table table
     LOG.debug("Collectors.size(): {}", collectorsByTable.keySet());

     if (collectorsByTable.keySet().size() < 1) {
-      LOG.warn("Collectors are empty! ; {}", tableFullName);
+      LOG.warn("Collectors are empty! ; {}", tableName);
     }

     // for now this should be true...
     assert (collectorsByTable.keySet().size() <= 1);

-    LOG.debug("Updating stats for: {}", tableFullName);
+    LOG.debug("Updating stats for: {}", tableName);

     for (String partName : collectorsByTable.keySet()) {
       ImmutableList<FooterStatCollector> values = collectorsByTable.get(partName);
@@ -358,19 +360,19 @@ private int updatePartitions(Hive db, List<FooterStatCollector> scs, Table table
       }

       if (values.get(0).result instanceof Table) {
-        db.alterTable(tableFullName, (Table) values.get(0).result, environmentContext, true);
-        LOG.debug("Updated stats for {}.", tableFullName);
+        db.alterTable(tableName, (Table) values.get(0).result, environmentContext, true);
+        LOG.debug("Updated stats for {}.", tableName);
       } else {
         if (values.get(0).result instanceof Partition) {
           List<Partition> results = Lists.transform(values, FooterStatCollector.EXTRACT_RESULT_FUNCTION);
-          db.alterPartitions(tableFullName, results, environmentContext, true);
-          LOG.debug("Bulk updated {} partitions of {}.", results.size(), tableFullName);
+          db.alterPartitions(tableName, results, environmentContext, true);
+          LOG.debug("Bulk updated {} partitions of {}.", results.size(), tableName);
         } else {
           throw new RuntimeException("inconsistent");
         }
       }
     }
-    LOG.debug("Updated stats for: {}", tableFullName);
+    LOG.debug("Updated stats for: {}", tableName);
     return 0;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java
index 6eb1ca2645..0688e24482 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/BasicStatsTask.java
@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -50,6 +51,7 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
+import org.apache.hadoop.hive.ql.parse.HiveTableName;
 import org.apache.hadoop.hive.ql.plan.BasicStatsWork;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
@@ -250,7 +252,7 @@ private int aggregateStats(Hive db) {

       List<Partition> partitions = getPartitionsList(db);

-      String tableFullName = table.getDbName() + "." + table.getTableName();
+      TableName tableName = HiveTableName.of(table);

       List<Partish> partishes = new ArrayList<>();

@@ -264,12 +266,12 @@ private int aggregateStats(Hive db) {
         if (res == null) {
           return 0;
         }
-        db.alterTable(tableFullName, res, environmentContext, true);
+        db.alterTable(tableName, res, environmentContext, true);

         if (conf.getBoolVar(ConfVars.TEZ_EXEC_SUMMARY)) {
-          console.printInfo("Table " + tableFullName + " stats: [" + toString(p.getPartParameters()) + ']');
+          console.printInfo("Table " + tableName + " stats: [" + toString(p.getPartParameters()) + ']');
         }
-        LOG.info("Table " + tableFullName + " stats: [" + toString(p.getPartParameters()) + ']');
+        LOG.info("Table " + tableName + " stats: [" + toString(p.getPartParameters()) + ']');
       } else {
         // Partitioned table:
@@ -332,7 +334,7 @@ public Void call() throws Exception {
         }

         if (!updates.isEmpty()) {
-          db.alterPartitions(tableFullName, updates, environmentContext, true);
+          db.alterPartitions(tableName, updates, environmentContext, true);
         }
         if (work.isStatsReliable() && updates.size() != processors.size()) {
           LOG.info("Stats should be reliable... however it seems like there were some issues => ret 1");
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index 18b5f270d8..17ba311a3e 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -126,35 +126,6 @@ public void testSerializeTimestamp() {
         SerializationUtilities.serializeExpression(desc)).getExprString());
   }

-  @Test
-  public void testgetDbTableName() throws HiveException{
-    String tablename;
-    String [] dbtab;
-    SessionState.start(new HiveConf(this.getClass()));
-    String curDefaultdb = SessionState.get().getCurrentDatabase();
-
-    //test table without db portion
-    tablename = "tab1";
-    dbtab = Utilities.getDbTableName(tablename);
-    assertEquals("db name", curDefaultdb, dbtab[0]);
-    assertEquals("table name", tablename, dbtab[1]);
-
-    //test table with db portion
-    tablename = "dab1.tab1";
-    dbtab = Utilities.getDbTableName(tablename);
-    assertEquals("db name", "dab1", dbtab[0]);
-    assertEquals("table name", "tab1", dbtab[1]);
-
-    //test invalid table name
-    tablename = "dab1.tab1.x1";
-    try {
-      dbtab = Utilities.getDbTableName(tablename);
-      fail("exception was expected for invalid table name");
-    } catch(HiveException ex){
-      assertEquals("Invalid table name " + tablename, ex.getMessage());
-    }
-  }
-
   @Test
   public void testReplaceTaskId() {
     String taskID = "000000";
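[Editor's note — not part of the patch] The hunk above deletes TestUtilities#testgetDbTableName along with Utilities.getDbTableName(), and the diff does not add a replacement test. If equivalent coverage were wanted against the new API, a test along these lines could be written; it is hypothetical, uses the two-argument fromString this patch standardizes on, and deliberately omits the old over-qualified case ("dab1.tab1.x1") because the new parser's behaviour for three-part, catalog-qualified names is not pinned down by this diff:

// ---------------------------------------------------------------------------
import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hive.common.TableName;
import org.junit.Test;

public class TestTableNameParsing {

  @Test
  public void testUnqualifiedNameUsesDefaultDb() throws Exception {
    // no db portion: the supplied default database should be used
    TableName tn = TableName.fromString("tab1", "defaultdb");
    assertEquals("db name", "defaultdb", tn.getDb());
    assertEquals("table name", "tab1", tn.getTable());
  }

  @Test
  public void testQualifiedNameKeepsItsDb() throws Exception {
    // db portion present: it should take precedence over the default
    TableName tn = TableName.fromString("dab1.tab1", "defaultdb");
    assertEquals("db name", "dab1", tn.getDb());
    assertEquals("table name", "tab1", tn.getTable());
  }
}
// ---------------------------------------------------------------------------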
diff --git ql/src/test/results/clientnegative/create_external_transactional.q.out ql/src/test/results/clientnegative/create_external_transactional.q.out
index 6a0f5c14bf..dcd42096d2 100644
--- ql/src/test/results/clientnegative/create_external_transactional.q.out
+++ ql/src/test/results/clientnegative/create_external_transactional.q.out
@@ -1 +1 @@
-FAILED: SemanticException transactional_external cannot be declared transactional because it's an external table
+FAILED: SemanticException default.transactional_external cannot be declared transactional because it's an external table
diff --git ql/src/test/results/clientnegative/delete_non_acid_table.q.out ql/src/test/results/clientnegative/delete_non_acid_table.q.out
index dafac6d7df..19fd5fb426 100644
--- ql/src/test/results/clientnegative/delete_non_acid_table.q.out
+++ ql/src/test/results/clientnegative/delete_non_acid_table.q.out
@@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table2
 -1070883071 0ruyd6Y50JpdGRf6HqD
 -1070551679 iUR3Q
 -1069736047 k17Am8uPHWk02cEf1jet
-FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table2 that is not transactional
+FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table default.not_an_acid_table2 that is not transactional
diff --git ql/src/test/results/clientnegative/mm_delete.q.out ql/src/test/results/clientnegative/mm_delete.q.out
index d0fd905673..ed7bafba98 100644
--- ql/src/test/results/clientnegative/mm_delete.q.out
+++ ql/src/test/results/clientnegative/mm_delete.q.out
@@ -65,4 +65,4 @@ POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@mm_srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).key SCRIPT []
 POSTHOOK: Lineage: mm_srcpart PARTITION(ds=2008-04-08,hr=11).value SCRIPT []
-FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only transactional
+FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional
diff --git ql/src/test/results/clientnegative/mm_update.q.out ql/src/test/results/clientnegative/mm_update.q.out
index 528d16269f..946ffd1598 100644
--- ql/src/test/results/clientnegative/mm_update.q.out
+++ ql/src/test/results/clientnegative/mm_update.q.out
@@ -55,4 +55,4 @@ POSTHOOK: Input: default@mm_srcpart@ds=2008-04-09/hr=11
 2008-04-09 11 43 val_43
 2008-04-09 11 413 val_413
 2008-04-09 11 413 val_413
-FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table mm_srcpart that is insert-only transactional
+FAILED: SemanticException [Error 10414]: Attempt to do update or delete on table default.mm_srcpart that is insert-only transactional
diff --git ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out
index 226de5ab1a..9866221f19 100644
--- ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out
+++ ql/src/test/results/clientnegative/spark/groupby_grouping_sets7.q.out
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@T1
+PREHOOK: Output: default@t1
 POSTHOOK: query: CREATE TABLE T1(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1
+POSTHOOK: Output: default@t1
 FAILED: SemanticException [Error 10225]: An additional MR job is introduced since the number of rows created per input row due to grouping sets is more than hive.new.job.grouping.set.cardinality. There is no need to handle skew separately. set hive.groupby.skewindata to false. The number of rows per input row due to grouping sets is 4
diff --git ql/src/test/results/clientnegative/update_non_acid_table.q.out ql/src/test/results/clientnegative/update_non_acid_table.q.out
index 64164ba4ed..02946fc185 100644
--- ql/src/test/results/clientnegative/update_non_acid_table.q.out
+++ ql/src/test/results/clientnegative/update_non_acid_table.q.out
@@ -34,4 +34,4 @@ POSTHOOK: Input: default@not_an_acid_table
 -1070883071 0ruyd6Y50JpdGRf6HqD
 -1070551679 iUR3Q
 -1069736047 k17Am8uPHWk02cEf1jet
-FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table not_an_acid_table that is not transactional
+FAILED: SemanticException [Error 10297]: Attempt to do update or delete on table default.not_an_acid_table that is not transactional
diff --git ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
index da805b0f1c..d309b1640a 100644
--- ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
+++ ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
@@ -23,7 +23,7 @@ TBLPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@bloomTest
+PREHOOK: Output: default@bloomtest
 POSTHOOK: query: CREATE TABLE bloomTest(
 msisdn STRING,
 imsi VARCHAR(20),
@@ -45,7 +45,7 @@ TBLPROPERTIES (
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@bloomTest
+POSTHOOK: Output: default@bloomtest
 PREHOOK: query: INSERT INTO bloomTest VALUES ('12345', '12345', 12345, 12345)
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/allow_change_col_type_par.q.out ql/src/test/results/clientpositive/allow_change_col_type_par.q.out
index d1905e24c7..85f30d1dfd 100644
--- ql/src/test/results/clientpositive/allow_change_col_type_par.q.out
+++ ql/src/test/results/clientpositive/allow_change_col_type_par.q.out
@@ -24,7 +24,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Change Column
-      table name: default.t1_n14
+      table name: hive.default.t1_n14
       new column name: c1
       new column type: smallint
       old column name: c1
diff --git ql/src/test/results/clientpositive/alter_change_db_location.q.out ql/src/test/results/clientpositive/alter_change_db_location.q.out
index 93e46782e5..a8184c7513 100644
--- ql/src/test/results/clientpositive/alter_change_db_location.q.out
+++ ql/src/test/results/clientpositive/alter_change_db_location.q.out
@@ -21,11 +21,11 @@ POSTHOOK: Input: database:newdb
 PREHOOK: query: create table tab_n13 (name string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:newdb
-PREHOOK: Output: newDB@tab_n13
+PREHOOK: Output: newdb@tab_n13
 POSTHOOK: query: create table tab_n13 (name string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:newdb
-POSTHOOK: Output: newDB@tab_n13
+POSTHOOK: Output: newdb@tab_n13
 PREHOOK: query: alter table tab_n13 rename to newName
 PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: newdb@tab_n13
@@ -33,5 +33,5 @@ PREHOOK: Output: newdb@tab_n13
 POSTHOOK: query: alter table tab_n13 rename to newName
 POSTHOOK: type: ALTERTABLE_RENAME
 POSTHOOK: Input: newdb@tab_n13
-POSTHOOK: Output: newDB@newName
+POSTHOOK: Output: newdb@newname
 POSTHOOK: Output: newdb@tab_n13
diff --git ql/src/test/results/clientpositive/alter_view_as_select.q.out ql/src/test/results/clientpositive/alter_view_as_select.q.out
index b024443542..a1eda5a36f 100644
--- ql/src/test/results/clientpositive/alter_view_as_select.q.out
+++ ql/src/test/results/clientpositive/alter_view_as_select.q.out
@@ -8,16 +8,16 @@ PREHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@srcpart
 PREHOOK: Output: database:tv
-PREHOOK: Output: tv@testView
+PREHOOK: Output: tv@testview
 POSTHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: database:tv
-POSTHOOK: Output: tv@testView
-POSTHOOK: Lineage: testView.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
-POSTHOOK: Lineage: testView.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
-POSTHOOK: Lineage: testView.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: testView.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: tv@testview
+POSTHOOK: Lineage: testview.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: testview.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: testview.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: testview.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: DESCRIBE FORMATTED tv.testView
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: tv@testview
@@ -55,12 +55,12 @@ PREHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:tv
-PREHOOK: Output: tv@testView
+PREHOOK: Output: tv@testview
 POSTHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:tv
-POSTHOOK: Output: tv@testView
+POSTHOOK: Output: tv@testview
 PREHOOK: query: DESCRIBE FORMATTED tv.testView
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: tv@testview
@@ -99,7 +99,7 @@ LIMIT 10
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:tv
-PREHOOK: Output: tv@testView
+PREHOOK: Output: tv@testview
 POSTHOOK: query: ALTER VIEW tv.testView AS
 SELECT * FROM src
 WHERE key > 80 AND key < 100
@@ -108,7 +108,7 @@ LIMIT 10
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:tv
-POSTHOOK: Output: tv@testView
+POSTHOOK: Output: tv@testview
 PREHOOK: query: DESCRIBE FORMATTED tv.testView
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: tv@testview
diff --git ql/src/test/results/clientpositive/annotate_stats_table.q.out ql/src/test/results/clientpositive/annotate_stats_table.q.out
index 9b9e31b214..38b3c6f93c 100644
--- ql/src/test/results/clientpositive/annotate_stats_table.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_table.q.out
@@ -369,10 +369,10 @@ STAGE PLANS:
   Stage: Stage-7
       Create Table
         columns: _c0 int
-        name: default.tmp_n0
        input format: org.apache.hadoop.mapred.TextInputFormat
        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
        serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        name: hive.default.tmp_n0

   Stage: Stage-2
     Map Reduce
diff --git ql/src/test/results/clientpositive/annotate_stats_udtf.q.out ql/src/test/results/clientpositive/annotate_stats_udtf.q.out
index c094fc1fea..b757a27568 100644
--- ql/src/test/results/clientpositive/annotate_stats_udtf.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_udtf.q.out
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table HIVE_20262 (a array<int>)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@HIVE_20262
+PREHOOK: Output: default@hive_20262
 POSTHOOK: query: create table HIVE_20262 (a array<int>)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@HIVE_20262
+POSTHOOK: Output: default@hive_20262
 PREHOOK: query: insert into HIVE_20262 select array(1)
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
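[Editor's note — not part of the patch] The golden-file churn above is of two kinds: entity names in PREHOOK/POSTHOOK output are now emitted lower-cased (default@T1 becomes default@t1), and plan descriptions print the catalog-qualified form (default.tmp_n0 becomes hive.default.tmp_n0). The diff does not show the normalization code itself, so the following is only a toy illustration of the apparent contract; the real logic presumably lives inside TableName, and this class is entirely hypothetical:

// ---------------------------------------------------------------------------
import java.util.Locale;

public class QualifiedNameDemo {
  // Normalize once, print the same canonical form everywhere, so golden files
  // no longer depend on how the user happened to case the identifier.
  static String render(String cat, String db, String table) {
    return (cat + "." + db + "." + table).toLowerCase(Locale.ROOT);
  }

  public static void main(String[] args) {
    System.out.println(render("hive", "default", "Tmp_N0")); // hive.default.tmp_n0
  }
}
// ---------------------------------------------------------------------------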
diff --git ql/src/test/results/clientpositive/avro_comments.q.out ql/src/test/results/clientpositive/avro_comments.q.out
index fbd472771e..135e66ed0e 100644
--- ql/src/test/results/clientpositive/avro_comments.q.out
+++ ql/src/test/results/clientpositive/avro_comments.q.out
@@ -38,7 +38,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments1
+PREHOOK: Output: default@testavrocomments1
 POSTHOOK: query: CREATE TABLE testAvroComments1
 ROW FORMAT SERDE
 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
@@ -75,7 +75,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments1
+POSTHOOK: Output: default@testavrocomments1
 PREHOOK: query: DESCRIBE testAvroComments1
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments1
@@ -132,7 +132,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments2
+PREHOOK: Output: default@testavrocomments2
 POSTHOOK: query: CREATE TABLE testAvroComments2
 ROW FORMAT SERDE
 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
@@ -167,7 +167,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments2
+POSTHOOK: Output: default@testavrocomments2
 PREHOOK: query: DESCRIBE testAvroComments2
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments2
@@ -222,7 +222,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments3
+PREHOOK: Output: default@testavrocomments3
 POSTHOOK: query: CREATE TABLE testAvroComments3
 ROW FORMAT SERDE
 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
@@ -255,7 +255,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
 }')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments3
+POSTHOOK: Output: default@testavrocomments3
 PREHOOK: query: DESCRIBE testAvroComments3
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments3
@@ -286,7 +286,7 @@ PREHOOK: query: CREATE TABLE testAvroComments4 (
 STORED AS AVRO
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments4
+PREHOOK: Output: default@testavrocomments4
 POSTHOOK: query: CREATE TABLE testAvroComments4 (
 number int COMMENT "Order of playing the role",
 first_name string COMMENT "first name of actor playing role",
@@ -295,7 +295,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments4 (
 STORED AS AVRO
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments4
+POSTHOOK: Output: default@testavrocomments4
 PREHOOK: query: DESCRIBE testAvroComments4
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments4
@@ -326,7 +326,7 @@ PREHOOK: query: CREATE TABLE testAvroComments5 (
 STORED AS AVRO
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments5
+PREHOOK: Output: default@testavrocomments5
 POSTHOOK: query: CREATE TABLE testAvroComments5 (
 number int COMMENT "Order of playing the role",
 first_name string,
@@ -335,7 +335,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments5 (
 STORED AS AVRO
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments5
+POSTHOOK: Output: default@testavrocomments5
 PREHOOK: query: DESCRIBE testAvroComments5
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments5
@@ -366,7 +366,7 @@ PREHOOK: query: CREATE TABLE testAvroComments6 (
 STORED AS AVRO
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@testAvroComments6
+PREHOOK: Output: default@testavrocomments6
 POSTHOOK: query: CREATE TABLE testAvroComments6 (
 number int,
 first_name string,
@@ -375,7 +375,7 @@ POSTHOOK: query: CREATE TABLE testAvroComments6 (
 STORED AS AVRO
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testAvroComments6
+POSTHOOK: Output: default@testavrocomments6
 PREHOOK: query: DESCRIBE testAvroComments6
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@testavrocomments6
diff --git ql/src/test/results/clientpositive/case_sensitivity.q.out ql/src/test/results/clientpositive/case_sensitivity.q.out
index fdcf86b1da..caf872ff41 100644
--- ql/src/test/results/clientpositive/case_sensitivity.q.out
+++ ql/src/test/results/clientpositive/case_sensitivity.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE DEST1_n129(Key INT, VALUE STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n129
+PREHOOK: Output: default@dest1_n129
 POSTHOOK: query: CREATE TABLE DEST1_n129(Key INT, VALUE STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n129
+POSTHOOK: Output: default@dest1_n129
 PREHOOK: query: EXPLAIN
 FROM SRC_THRIFT
 INSERT OVERWRITE TABLE dest1_n129 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
diff --git ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
index f445ba3d3d..086a62b721 100644
--- ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
+++ ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: create table A_n18 (key string, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@A_n18
+PREHOOK: Output: default@a_n18
 POSTHOOK: query: create table A_n18 (key string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@A_n18
+POSTHOOK: Output: default@a_n18
 PREHOOK: query: insert into A_n18 select * from src
 PREHOOK: type: QUERY
@@ -21,11 +21,11 @@ POSTHOOK: Lineage: a_n18.value SIMPLE [(src)src.FieldSchema(name:value, type:str
 PREHOOK: query: create table B_n14 (key string, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@B_n14
+PREHOOK: Output: default@b_n14
 POSTHOOK: query: create table B_n14 (key string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@B_n14
+POSTHOOK: Output: default@b_n14
 PREHOOK: query: insert into B_n14 select * from src
 order by key limit 10
diff --git ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
index b72c13ad22..617dda97c8 100644
--- ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
+++ ql/src/test/results/clientpositive/column_pruner_multiple_children.q.out
@@ -1,11 +1,11 @@
 PREHOOK: query: CREATE TABLE DEST1_n52(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n52
+PREHOOK: Output: default@dest1_n52
 POSTHOOK: query: CREATE TABLE DEST1_n52(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n52
+POSTHOOK: Output: default@dest1_n52
 PREHOOK: query: create table s_n129 as select * from src where key='10'
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
diff --git ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
index 09ebed0c0a..ee82e76631 100644
--- ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
+++ ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
@@ -125,11 +125,11 @@
 NULL NULL NULL 0 NULL
 PREHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@columnShortcutTable
+PREHOOK: Output: default@columnshortcuttable
 POSTHOOK: query: CREATE table columnShortcutTable (key STRING, value STRING) STORED AS RCFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@columnShortcutTable
+POSTHOOK: Output: default@columnshortcuttable
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE columnShortcutTable SELECT src.key, src.value LIMIT 10
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/columnstats_partlvl.q.out ql/src/test/results/clientpositive/columnstats_partlvl.q.out
index f12577c3c2..df1ffd1f1b 100644
--- ql/src/test/results/clientpositive/columnstats_partlvl.q.out
+++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out
@@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String)
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_Part
+PREHOOK: Output: default@employee_part
 POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double)
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_Part
+POSTHOOK: Output: default@employee_part
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
 PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
index e2946a227e..da04cd1e74 100644
--- ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
+++ ql/src/test/results/clientpositive/columnstats_partlvl_dp.q.out
@@ -6,12 +6,12 @@ PREHOOK: query: CREATE TABLE Employee_Part_n0(employeeID int, employeeName Strin
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_Part_n0
+PREHOOK: Output: default@employee_part_n0
 POSTHOOK: query: CREATE TABLE Employee_Part_n0(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_Part_n0
+POSTHOOK: Output: default@employee_part_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_Part_n0 partition(employeeSalary='2000.0', country='USA')
 PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -637,12 +637,12 @@ PREHOOK: query: CREATE TABLE Employee_n0(employeeID int, employeeName String) pa
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_n0
+PREHOOK: Output: default@employee_n0
 POSTHOOK: query: CREATE TABLE Employee_n0(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_n0
+POSTHOOK: Output: default@employee_n0
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee_n0 partition(employeeSalary='2000.0', country='USA')
 PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/columnstats_tbllvl.q.out ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
index f22d15c2d3..a2c502662a 100644
--- ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
+++ ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
@@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none (
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
-PREHOOK: Output: default@UserVisits_web_text_none
+PREHOOK: Output: default@uservisits_web_text_none
 POSTHOOK: query: CREATE TABLE UserVisits_web_text_none (
 sourceIP string,
 destURL string,
@@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none (
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
-POSTHOOK: Output: default@UserVisits_web_text_none
+POSTHOOK: Output: default@uservisits_web_text_none
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
 PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -514,7 +514,7 @@ PREHOOK: query: CREATE TABLE UserVisits_in_dummy_db (
 row format delimited fields terminated by '|' stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:dummydb
-PREHOOK: Output: dummydb@UserVisits_in_dummy_db
+PREHOOK: Output: dummydb@uservisits_in_dummy_db
 POSTHOOK: query: CREATE TABLE UserVisits_in_dummy_db (
 sourceIP string,
 destURL string,
@@ -528,7 +528,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_in_dummy_db (
 row format delimited fields terminated by '|' stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:dummydb
-POSTHOOK: Output: dummydb@UserVisits_in_dummy_db
+POSTHOOK: Output: dummydb@uservisits_in_dummy_db
 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_in_dummy_db
 PREHOOK: type: LOAD
#### A masked pattern was here ####
"../../data/files/UserVisits.dat" INTO TABLE UserVisits_in_dummy_db PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/correlationoptimizer5.q.out ql/src/test/results/clientpositive/correlationoptimizer5.q.out index 2e9e6027ae..2a19232fdd 100644 --- ql/src/test/results/clientpositive/correlationoptimizer5.q.out +++ ql/src/test/results/clientpositive/correlationoptimizer5.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE T1_n19(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n19 +PREHOOK: Output: default@t1_n19 POSTHOOK: query: CREATE TABLE T1_n19(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n19 +POSTHOOK: Output: default@t1_n19 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE T1_n19 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -17,11 +17,11 @@ POSTHOOK: Output: default@t1_n19 PREHOOK: query: CREATE TABLE T2_n11(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n11 +PREHOOK: Output: default@t2_n11 POSTHOOK: query: CREATE TABLE T2_n11(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n11 +POSTHOOK: Output: default@t2_n11 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv2.txt' INTO TABLE T2_n11 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -33,11 +33,11 @@ POSTHOOK: Output: default@t2_n11 PREHOOK: query: CREATE TABLE T3_n5(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n5 +PREHOOK: Output: default@t3_n5 POSTHOOK: query: CREATE TABLE T3_n5(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n5 +POSTHOOK: Output: default@t3_n5 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE T3_n5 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -49,11 +49,11 @@ POSTHOOK: Output: default@t3_n5 PREHOOK: query: CREATE TABLE T4_n1(key INT, val STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n1 +PREHOOK: Output: default@t4_n1 POSTHOOK: query: CREATE TABLE T4_n1(key INT, val STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n1 +POSTHOOK: Output: default@t4_n1 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE T4_n1 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/create_like.q.out ql/src/test/results/clientpositive/create_like.q.out index 8867dc19d5..2fb9877695 100644 --- ql/src/test/results/clientpositive/create_like.q.out +++ ql/src/test/results/clientpositive/create_like.q.out @@ -465,19 +465,19 @@ Storage Desc Params: PREHOOK: query: CREATE TABLE PropertiedParquetTable(a INT, b STRING) STORED AS PARQUET TBLPROPERTIES("parquet.compression"="LZO") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@PropertiedParquetTable +PREHOOK: Output: default@propertiedparquettable POSTHOOK: query: CREATE TABLE PropertiedParquetTable(a INT, b STRING) STORED AS PARQUET TBLPROPERTIES("parquet.compression"="LZO") POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@PropertiedParquetTable +POSTHOOK: Output: default@propertiedparquettable PREHOOK: query: 
CREATE TABLE LikePropertiedParquetTable LIKE PropertiedParquetTable PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@LikePropertiedParquetTable +PREHOOK: Output: default@likepropertiedparquettable POSTHOOK: query: CREATE TABLE LikePropertiedParquetTable LIKE PropertiedParquetTable POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@LikePropertiedParquetTable +POSTHOOK: Output: default@likepropertiedparquettable PREHOOK: query: DESCRIBE FORMATTED LikePropertiedParquetTable PREHOOK: type: DESCTABLE PREHOOK: Input: default@likepropertiedparquettable diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out index 7414d4749d..768006265d 100644 --- ql/src/test/results/clientpositive/create_view.q.out +++ ql/src/test/results/clientpositive/create_view.q.out @@ -175,7 +175,7 @@ STAGE PLANS: Create View columns: valoo string expanded text: SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `default`.`src` WHERE `src`.`key`=86) `view0` - name: default.view0 + name: hive.default.view0 original text: SELECT upper(value) FROM src WHERE key=86 PREHOOK: query: EXPLAIN diff --git ql/src/test/results/clientpositive/create_with_constraints.q.out ql/src/test/results/clientpositive/create_with_constraints.q.out index ba317581d5..3b9a2279db 100644 --- ql/src/test/results/clientpositive/create_with_constraints.q.out +++ ql/src/test/results/clientpositive/create_with_constraints.q.out @@ -2293,12 +2293,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:dbconstraint PREHOOK: query: CREATE TABLE Table2 (a STRING, b STRING NOT NULL DISABLE, CONSTRAINT Pk1 PRIMARY KEY (a) DISABLE) PREHOOK: type: CREATETABLE -PREHOOK: Output: DbConstraint@Table2 PREHOOK: Output: database:dbconstraint +PREHOOK: Output: dbconstraint@table2 POSTHOOK: query: CREATE TABLE Table2 (a STRING, b STRING NOT NULL DISABLE, CONSTRAINT Pk1 PRIMARY KEY (a) DISABLE) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: DbConstraint@Table2 POSTHOOK: Output: database:dbconstraint +POSTHOOK: Output: dbconstraint@table2 PREHOOK: query: USE default PREHOOK: type: SWITCHDATABASE PREHOOK: Input: database:default @@ -2599,12 +2599,12 @@ PREHOOK: query: CREATE TABLE numericDataType_n0(a TINYINT, b SMALLINT NOT NULL E d BIGINT , e DOUBLE , f DECIMAL(9,2)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@numericDataType_n0 +PREHOOK: Output: default@numericdatatype_n0 POSTHOOK: query: CREATE TABLE numericDataType_n0(a TINYINT, b SMALLINT NOT NULL ENABLE, c INT, d BIGINT , e DOUBLE , f DECIMAL(9,2)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@numericDataType_n0 +POSTHOOK: Output: default@numericdatatype_n0 PREHOOK: query: INSERT INTO numericDataType_n0 values(2,45,5667,67890,5.6,678.5) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/cross_product_check_1.q.out ql/src/test/results/clientpositive/cross_product_check_1.q.out index 7e99c034e6..d8791a3933 100644 --- ql/src/test/results/clientpositive/cross_product_check_1.q.out +++ ql/src/test/results/clientpositive/cross_product_check_1.q.out @@ -3,13 +3,13 @@ select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@A_n8 +PREHOOK: Output: default@a_n8 POSTHOOK: query: create table A_n8 as select * from src POSTHOOK: type: 
CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n8 +POSTHOOK: Output: default@a_n8 POSTHOOK: Lineage: a_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: a_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: create table B_n6 as @@ -18,14 +18,14 @@ limit 10 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@B_n6 +PREHOOK: Output: default@b_n6 POSTHOOK: query: create table B_n6 as select * from src limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@B_n6 +POSTHOOK: Output: default@b_n6 POSTHOOK: Lineage: b_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: b_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product diff --git ql/src/test/results/clientpositive/cross_product_check_2.q.out ql/src/test/results/clientpositive/cross_product_check_2.q.out index e1b6f3c5fc..3452e95d6e 100644 --- ql/src/test/results/clientpositive/cross_product_check_2.q.out +++ ql/src/test/results/clientpositive/cross_product_check_2.q.out @@ -3,13 +3,13 @@ select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@A_n2 +PREHOOK: Output: default@a_n2 POSTHOOK: query: create table A_n2 as select * from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n2 +POSTHOOK: Output: default@a_n2 POSTHOOK: Lineage: a_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: a_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: create table B_n2 as @@ -18,14 +18,14 @@ limit 10 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@B_n2 +PREHOOK: Output: default@b_n2 POSTHOOK: query: create table B_n2 as select * from src order by key limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@B_n2 +POSTHOOK: Output: default@b_n2 POSTHOOK: Lineage: b_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: b_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product diff --git ql/src/test/results/clientpositive/ctas.q.out ql/src/test/results/clientpositive/ctas.q.out index 23cc8b967e..2b1a0642e5 100644 --- ql/src/test/results/clientpositive/ctas.q.out +++ ql/src/test/results/clientpositive/ctas.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table nzhang_Tmp(a int, b string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_Tmp +PREHOOK: Output: default@nzhang_tmp POSTHOOK: query: create table nzhang_Tmp(a int, b string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_Tmp +POSTHOOK: Output: default@nzhang_tmp PREHOOK: query: select * from nzhang_Tmp PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_tmp @@ -18,12 +18,12 @@ PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from sr PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_CTAS1 +PREHOOK: Output: default@nzhang_ctas1 POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_CTAS1 +POSTHOOK: Output: default@nzhang_ctas1 STAGE DEPENDENCIES: Stage-1 is a root stage Stage-2 depends on stages: Stage-1 @@ -91,7 +91,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_CTAS1 + name: default.nzhang_ctas1 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -118,10 +118,10 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: k string, value string - name: default.nzhang_CTAS1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas1 Stage: Stage-3 Stats Work @@ -129,7 +129,7 @@ STAGE PLANS: Column Stats Desc: Columns: k, value Column Types: string, string - Table: default.nzhang_CTAS1 + Table: default.nzhang_ctas1 Stage: Stage-4 Map Reduce @@ -159,12 +159,12 @@ PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort b PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_CTAS1 +PREHOOK: Output: default@nzhang_ctas1 POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_CTAS1 +POSTHOOK: Output: default@nzhang_ctas1 POSTHOOK: Lineage: nzhang_ctas1.k SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_ctas1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from nzhang_CTAS1 @@ -324,10 +324,10 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: key string, value string - name: default.nzhang_ctas2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas2 Stage: 
Stage-3 Stats Work @@ -530,10 +530,10 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: half_key double, conb string - name: default.nzhang_ctas3 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: hive.default.nzhang_ctas3 Stage: Stage-3 Stats Work @@ -800,11 +800,11 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: key string, value string - name: default.nzhang_ctas4 field delimiter: , input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas4 Stage: Stage-3 Stats Work @@ -1008,13 +1008,13 @@ STAGE PLANS: Stage: Stage-5 Create Table columns: key string, value string - name: default.nzhang_ctas5 field delimiter: , input format: org.apache.hadoop.mapred.TextInputFormat line delimiter: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas5 Stage: Stage-3 Stats Work diff --git ql/src/test/results/clientpositive/dbtxnmgr_compact1.q.out ql/src/test/results/clientpositive/dbtxnmgr_compact1.q.out index cebcb15981..572107c84a 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_compact1.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_compact1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n153(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n153 +PREHOOK: Output: default@t1_n153 POSTHOOK: query: create table T1_n153(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n153 +POSTHOOK: Output: default@t1_n153 PREHOOK: query: alter table T1_n153 compact 'major' PREHOOK: type: ALTERTABLE_COMPACT POSTHOOK: query: alter table T1_n153 compact 'major' diff --git ql/src/test/results/clientpositive/dbtxnmgr_compact2.q.out ql/src/test/results/clientpositive/dbtxnmgr_compact2.q.out index 9d744e2c61..3be5d2f281 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_compact2.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_compact2.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n105(key string, val string) partitioned by (ds string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n105 +PREHOOK: Output: default@t1_n105 POSTHOOK: query: create table T1_n105(key string, val string) partitioned by (ds string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n105 +POSTHOOK: Output: default@t1_n105 PREHOOK: query: alter table T1_n105 add partition (ds = 'today') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: default@t1_n105 @@ -37,7 +37,7 @@ STAGE PLANS: compaction type: minor partition spec: ds yesterday - table name: default.T1_n105 + table name: default.t1_n105 PREHOOK: query: alter table T1_n105 partition (ds = 'yesterday') compact 'minor' PREHOOK: type: ALTERTABLE_COMPACT diff --git 
ql/src/test/results/clientpositive/dbtxnmgr_compact3.q.out ql/src/test/results/clientpositive/dbtxnmgr_compact3.q.out index 707548562f..74d56b230c 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_compact3.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_compact3.q.out @@ -12,11 +12,11 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:d1 PREHOOK: query: create table T1_n71(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') PREHOOK: type: CREATETABLE -PREHOOK: Output: D1@T1_n71 +PREHOOK: Output: d1@t1_n71 PREHOOK: Output: database:d1 POSTHOOK: query: create table T1_n71(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') POSTHOOK: type: CREATETABLE -POSTHOOK: Output: D1@T1_n71 +POSTHOOK: Output: d1@t1_n71 POSTHOOK: Output: database:d1 PREHOOK: query: alter table T1_n71 compact 'major' PREHOOK: type: ALTERTABLE_COMPACT diff --git ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out index b312cb869b..1dda1b088a 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out @@ -21,19 +21,19 @@ POSTHOOK: Output: database:d1 PREHOOK: query: create table T1_n50(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n50 +PREHOOK: Output: default@t1_n50 POSTHOOK: query: create table T1_n50(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n50 +POSTHOOK: Output: default@t1_n50 PREHOOK: query: create table T2_n31 like T1_n50 PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n31 +PREHOOK: Output: default@t2_n31 POSTHOOK: query: create table T2_n31 like T1_n50 POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n31 +POSTHOOK: Output: default@t2_n31 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n50 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -60,12 +60,12 @@ PREHOOK: query: create table T3_n11 as select * from T1_n50 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@t1_n50 PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n11 +PREHOOK: Output: default@t3_n11 POSTHOOK: query: create table T3_n11 as select * from T1_n50 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@t1_n50 POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n11 +POSTHOOK: Output: default@t3_n11 POSTHOOK: Lineage: t3_n11.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: t3_n11.val SIMPLE [(t1_n50)t1_n50.FieldSchema(name:val, type:string, comment:null), ] PREHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int) @@ -74,14 +74,14 @@ PREHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int) stored as orc PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n4 +PREHOOK: Output: default@t4_n4 POSTHOOK: query: create table T4_n4 (key char(10), val decimal(5,2), b int) partitioned by (ds string) clustered by (b) into 10 buckets stored as orc POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n4 +POSTHOOK: Output: default@t4_n4 PREHOOK: query: alter table T3_n11 rename to newT3_n11 PREHOOK: type: 
ALTERTABLE_RENAME PREHOOK: Input: default@t3_n11 @@ -89,7 +89,7 @@ PREHOOK: Output: default@t3_n11 POSTHOOK: query: alter table T3_n11 rename to newT3_n11 POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: default@t3_n11 -POSTHOOK: Output: default@newT3_n11 +POSTHOOK: Output: default@newt3_n11 POSTHOOK: Output: default@t3_n11 PREHOOK: query: alter table T2_n31 set tblproperties ('test'='thisisatest') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -158,11 +158,11 @@ POSTHOOK: Output: default@t4_n4@ds=tomorrow PREHOOK: query: create table T5_n1 (a string, b int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T5_n1 +PREHOOK: Output: default@t5_n1 POSTHOOK: query: create table T5_n1 (a string, b int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T5_n1 +POSTHOOK: Output: default@t5_n1 PREHOOK: query: alter table T5_n1 set fileformat orc PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@t5_n1 @@ -174,11 +174,11 @@ POSTHOOK: Output: default@t5_n1 PREHOOK: query: create table T7_n2 (a string, b int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T7_n2 +PREHOOK: Output: default@t7_n2 POSTHOOK: query: create table T7_n2 (a string, b int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T7_n2 +POSTHOOK: Output: default@t7_n2 #### A masked pattern was here #### PREHOOK: type: ALTERTABLE_LOCATION PREHOOK: Input: default@t7_n2 @@ -218,13 +218,13 @@ PREHOOK: query: create view V1_n5 as select key from T1_n50 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@t1_n50 PREHOOK: Output: database:default -PREHOOK: Output: default@V1_n5 +PREHOOK: Output: default@v1_n5 POSTHOOK: query: create view V1_n5 as select key from T1_n50 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@t1_n50 POSTHOOK: Output: database:default -POSTHOOK: Output: default@V1_n5 -POSTHOOK: Lineage: V1_n5.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Output: default@v1_n5 +POSTHOOK: Lineage: v1_n5.key SIMPLE [(t1_n50)t1_n50.FieldSchema(name:key, type:string, comment:null), ] PREHOOK: query: alter view V1_n5 set tblproperties ('test'='thisisatest') PREHOOK: type: ALTERVIEW_PROPERTIES PREHOOK: Input: default@v1_n5 diff --git ql/src/test/results/clientpositive/dbtxnmgr_query1.q.out ql/src/test/results/clientpositive/dbtxnmgr_query1.q.out index 603c0f4126..a2632cb1ba 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_query1.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_query1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n20(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n20 +PREHOOK: Output: default@t1_n20 POSTHOOK: query: create table T1_n20(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n20 +POSTHOOK: Output: default@t1_n20 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n20 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n20 PREHOOK: query: create table T2_n12(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n12 +PREHOOK: Output: default@t2_n12 POSTHOOK: query: create table T2_n12(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: 
database:default -POSTHOOK: Output: default@T2_n12 +POSTHOOK: Output: default@t2_n12 PREHOOK: query: insert into table T2_n12 select * from T1_n20 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n20 diff --git ql/src/test/results/clientpositive/dbtxnmgr_query2.q.out ql/src/test/results/clientpositive/dbtxnmgr_query2.q.out index 43096a7ba9..f5f54cbc07 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_query2.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_query2.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n74(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n74 +PREHOOK: Output: default@t1_n74 POSTHOOK: query: create table T1_n74(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n74 +POSTHOOK: Output: default@t1_n74 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n74 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n74 PREHOOK: query: create table T2_n45(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n45 +PREHOOK: Output: default@t2_n45 POSTHOOK: query: create table T2_n45(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n45 +POSTHOOK: Output: default@t2_n45 PREHOOK: query: insert overwrite table T2_n45 select * from T1_n74 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n74 diff --git ql/src/test/results/clientpositive/dbtxnmgr_query3.q.out ql/src/test/results/clientpositive/dbtxnmgr_query3.q.out index 5af2f1c0d6..8bb1eabb77 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_query3.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_query3.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n111(key string, val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n111 +PREHOOK: Output: default@t1_n111 POSTHOOK: query: create table T1_n111(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n111 +POSTHOOK: Output: default@t1_n111 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n111 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n111 PREHOOK: query: create table T2_n67(key string, val string) partitioned by (pval string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n67 +PREHOOK: Output: default@t2_n67 POSTHOOK: query: create table T2_n67(key string, val string) partitioned by (pval string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n67 +POSTHOOK: Output: default@t2_n67 PREHOOK: query: insert into table T2_n67 partition (pval = '1') select * from T1_n111 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n111 diff --git ql/src/test/results/clientpositive/dbtxnmgr_query4.q.out ql/src/test/results/clientpositive/dbtxnmgr_query4.q.out index dd7b6294c0..3ba2bc65cb 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_query4.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_query4.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table T1_n163(key string, val string) stored as 
textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n163 +PREHOOK: Output: default@t1_n163 POSTHOOK: query: create table T1_n163(key string, val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n163 +POSTHOOK: Output: default@t1_n163 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n163 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -31,11 +31,11 @@ POSTHOOK: Input: default@t1_n163 PREHOOK: query: create table T2_n95(key string) partitioned by (val string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n95 +PREHOOK: Output: default@t2_n95 POSTHOOK: query: create table T2_n95(key string) partitioned by (val string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n95 +POSTHOOK: Output: default@t2_n95 PREHOOK: query: insert overwrite table T2_n95 partition (val) select key, val from T1_n163 PREHOOK: type: QUERY PREHOOK: Input: default@t1_n163 diff --git ql/src/test/results/clientpositive/dbtxnmgr_query5.q.out ql/src/test/results/clientpositive/dbtxnmgr_query5.q.out index ea76a37512..2938d4628e 100644 --- ql/src/test/results/clientpositive/dbtxnmgr_query5.q.out +++ ql/src/test/results/clientpositive/dbtxnmgr_query5.q.out @@ -13,11 +13,11 @@ POSTHOOK: Input: database:foo PREHOOK: query: create table T1_n40(key string, val string) partitioned by (ds string) stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:foo -PREHOOK: Output: foo@T1_n40 +PREHOOK: Output: foo@t1_n40 POSTHOOK: query: create table T1_n40(key string, val string) partitioned by (ds string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:foo -POSTHOOK: Output: foo@T1_n40 +POSTHOOK: Output: foo@t1_n40 PREHOOK: query: alter table T1_n40 add partition (ds='today') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: foo@t1_n40 @@ -29,13 +29,13 @@ PREHOOK: query: create view V1_n3 as select key from T1_n40 PREHOOK: type: CREATEVIEW PREHOOK: Input: foo@t1_n40 PREHOOK: Output: database:foo -PREHOOK: Output: foo@V1_n3 +PREHOOK: Output: foo@v1_n3 POSTHOOK: query: create view V1_n3 as select key from T1_n40 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: foo@t1_n40 POSTHOOK: Output: database:foo -POSTHOOK: Output: foo@V1_n3 -POSTHOOK: Lineage: V1_n3.key SIMPLE [(t1_n40)t1_n40.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Output: foo@v1_n3 +POSTHOOK: Lineage: v1_n3.key SIMPLE [(t1_n40)t1_n40.FieldSchema(name:key, type:string, comment:null), ] PREHOOK: query: show tables PREHOOK: type: SHOWTABLES PREHOOK: Input: database:foo diff --git ql/src/test/results/clientpositive/decimal_10_0.q.out ql/src/test/results/clientpositive/decimal_10_0.q.out index fb65b1b7b2..0035f8c983 100644 --- ql/src/test/results/clientpositive/decimal_10_0.q.out +++ ql/src/test/results/clientpositive/decimal_10_0.q.out @@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE `DECIMAL_n0` (`dec` decimal) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_n0 +PREHOOK: Output: default@decimal_n0 POSTHOOK: query: CREATE TABLE `DECIMAL_n0` (`dec` decimal) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_n0 +POSTHOOK: Output: default@decimal_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/decimal_10_0.txt' OVERWRITE 
INTO TABLE `DECIMAL_n0` PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_3.q.out ql/src/test/results/clientpositive/decimal_3.q.out index d2e39571fd..40f7a52d4b 100644 --- ql/src/test/results/clientpositive/decimal_3.q.out +++ ql/src/test/results/clientpositive/decimal_3.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_3 +PREHOOK: Output: default@decimal_3 POSTHOOK: query: CREATE TABLE DECIMAL_3(key decimal(38,18), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_3 +POSTHOOK: Output: default@decimal_3 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_4.q.out ql/src/test/results/clientpositive/decimal_4.q.out index 9d3ee84f3b..6d04491bc2 100644 --- ql/src/test/results/clientpositive/decimal_4.q.out +++ ql/src/test/results/clientpositive/decimal_4.q.out @@ -12,24 +12,24 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_4_1 +PREHOOK: Output: default@decimal_4_1 POSTHOOK: query: CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_4_1 +POSTHOOK: Output: default@decimal_4_1 PREHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25)) STORED AS ORC PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_4_2 +PREHOOK: Output: default@decimal_4_2 POSTHOOK: query: CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25)) STORED AS ORC POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_4_2 +POSTHOOK: Output: default@decimal_4_2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_5.q.out ql/src/test/results/clientpositive/decimal_5.q.out index f24588c5cb..a54a4d46db 100644 --- ql/src/test/results/clientpositive/decimal_5.q.out +++ ql/src/test/results/clientpositive/decimal_5.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_5_n0 +PREHOOK: Output: default@decimal_5_n0 POSTHOOK: query: CREATE TABLE DECIMAL_5_n0(key decimal(10,5), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_5_n0 +POSTHOOK: Output: default@decimal_5_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_5_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_6.q.out ql/src/test/results/clientpositive/decimal_6.q.out index 83cadcef8d..2be969911a 100644 --- ql/src/test/results/clientpositive/decimal_6.q.out +++ ql/src/test/results/clientpositive/decimal_6.q.out @@ -16,28 +16,28 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: 
database:default -PREHOOK: Output: default@DECIMAL_6_1_n0 +PREHOOK: Output: default@decimal_6_1_n0 POSTHOOK: query: CREATE TABLE DECIMAL_6_1_n0(key decimal(10,5), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_1_n0 +POSTHOOK: Output: default@decimal_6_1_n0 PREHOOK: query: CREATE TABLE DECIMAL_6_2_n0(key decimal(17,4), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_2_n0 +PREHOOK: Output: default@decimal_6_2_n0 POSTHOOK: query: CREATE TABLE DECIMAL_6_2_n0(key decimal(17,4), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_2_n0 +POSTHOOK: Output: default@decimal_6_2_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv9.txt' INTO TABLE DECIMAL_6_1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -130,12 +130,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_6_3_n0 AS SELECT key + 5.5 AS k, value * 11 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_6_1_n0 PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_6_3_n0 +PREHOOK: Output: default@decimal_6_3_n0 POSTHOOK: query: CREATE TABLE DECIMAL_6_3_n0 AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1_n0 ORDER BY v POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_6_1_n0 POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_6_3_n0 +POSTHOOK: Output: default@decimal_6_3_n0 POSTHOOK: Lineage: decimal_6_3_n0.k EXPRESSION [(decimal_6_1_n0)decimal_6_1_n0.FieldSchema(name:key, type:decimal(10,5), comment:null), ] POSTHOOK: Lineage: decimal_6_3_n0.v EXPRESSION [(decimal_6_1_n0)decimal_6_1_n0.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: desc DECIMAL_6_3_n0 diff --git ql/src/test/results/clientpositive/decimal_join2.q.out ql/src/test/results/clientpositive/decimal_join2.q.out index 59d0672132..8fbbb6dbed 100644 --- ql/src/test/results/clientpositive/decimal_join2.q.out +++ ql/src/test/results/clientpositive/decimal_join2.q.out @@ -12,14 +12,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_3_txt +PREHOOK: Output: default@decimal_3_txt POSTHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_3_txt +POSTHOOK: Output: default@decimal_3_txt PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,12 +32,12 @@ PREHOOK: query: CREATE TABLE DECIMAL_3_n0 STORED AS ORC AS SELECT * FROM DECIMAL PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_3_txt PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_3_n0 +PREHOOK: Output: default@decimal_3_n0 POSTHOOK: query: CREATE TABLE DECIMAL_3_n0 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_3_txt POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_3_n0 +POSTHOOK: Output: default@decimal_3_n0 POSTHOOK: Lineage: decimal_3_n0.key SIMPLE 
[(decimal_3_txt)decimal_3_txt.FieldSchema(name:key, type:decimal(38,18), comment:null), ] POSTHOOK: Lineage: decimal_3_n0.value SIMPLE [(decimal_3_txt)decimal_3_txt.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: EXPLAIN diff --git ql/src/test/results/clientpositive/decimal_precision.q.out ql/src/test/results/clientpositive/decimal_precision.q.out index 179e746b70..e5afaf69e0 100644 --- ql/src/test/results/clientpositive/decimal_precision.q.out +++ ql/src/test/results/clientpositive/decimal_precision.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_n0 +PREHOOK: Output: default@decimal_precision_n0 POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(20,10)) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_n0 +POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv8.txt' INTO TABLE DECIMAL_PRECISION_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -645,11 +645,11 @@ POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(38,18)) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_PRECISION_n0 +PREHOOK: Output: default@decimal_precision_n0 POSTHOOK: query: CREATE TABLE DECIMAL_PRECISION_n0(`dec` decimal(38,18)) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_PRECISION_n0 +POSTHOOK: Output: default@decimal_precision_n0 PREHOOK: query: INSERT INTO DECIMAL_PRECISION_n0 VALUES(98765432109876543210.12345), (98765432109876543210.12345) PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table diff --git ql/src/test/results/clientpositive/decimal_serde.q.out ql/src/test/results/clientpositive/decimal_serde.q.out index 81a4db822a..324c286bf7 100644 --- ql/src/test/results/clientpositive/decimal_serde.q.out +++ ql/src/test/results/clientpositive/decimal_serde.q.out @@ -20,14 +20,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_TEXT +PREHOOK: Output: default@decimal_text POSTHOOK: query: CREATE TABLE DECIMAL_TEXT (key decimal, value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_TEXT +POSTHOOK: Output: default@decimal_text PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_TEXT PREHOOK: type: LOAD #### A masked pattern was here #### @@ -88,14 +88,14 @@ SELECT * FROM DECIMAL_TEXT PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_text PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_RC +PREHOOK: Output: default@decimal_rc POSTHOOK: query: CREATE TABLE DECIMAL_RC STORED AS RCFile AS SELECT * FROM DECIMAL_TEXT POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_text POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_RC +POSTHOOK: Output: default@decimal_rc POSTHOOK: Lineage: decimal_rc.key SIMPLE [(decimal_text)decimal_text.FieldSchema(name:key, type:decimal(10,0), comment:null), ] POSTHOOK: Lineage: decimal_rc.value SIMPLE [(decimal_text)decimal_text.FieldSchema(name:value, type:int, comment:null), ] 
PREHOOK: query: describe formatted DECIMAL_RC @@ -140,7 +140,7 @@ SELECT * FROM DECIMAL_RC PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_rc PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_LAZY_COL +PREHOOK: Output: default@decimal_lazy_col POSTHOOK: query: CREATE TABLE DECIMAL_LAZY_COL ROW FORMAT SERDE "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" STORED AS RCFile AS @@ -148,7 +148,7 @@ SELECT * FROM DECIMAL_RC POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_rc POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_LAZY_COL +POSTHOOK: Output: default@decimal_lazy_col POSTHOOK: Lineage: decimal_lazy_col.key SIMPLE [(decimal_rc)decimal_rc.FieldSchema(name:key, type:decimal(10,0), comment:null), ] POSTHOOK: Lineage: decimal_lazy_col.value SIMPLE [(decimal_rc)decimal_rc.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: describe formatted DECIMAL_LAZY_COL @@ -196,7 +196,7 @@ SELECT * FROM DECIMAL_LAZY_COL ORDER BY key PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_lazy_col PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_SEQUENCE +PREHOOK: Output: default@decimal_sequence POSTHOOK: query: CREATE TABLE DECIMAL_SEQUENCE ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' @@ -207,7 +207,7 @@ SELECT * FROM DECIMAL_LAZY_COL ORDER BY key POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@decimal_lazy_col POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_SEQUENCE +POSTHOOK: Output: default@decimal_sequence POSTHOOK: Lineage: decimal_sequence.key SIMPLE [(decimal_lazy_col)decimal_lazy_col.FieldSchema(name:key, type:decimal(10,0), comment:null), ] POSTHOOK: Lineage: decimal_sequence.value SIMPLE [(decimal_lazy_col)decimal_lazy_col.FieldSchema(name:value, type:int, comment:null), ] PREHOOK: query: SELECT * FROM DECIMAL_SEQUENCE ORDER BY key, value diff --git ql/src/test/results/clientpositive/decimal_trailing.q.out ql/src/test/results/clientpositive/decimal_trailing.q.out index 070de2b0ba..0cb622d988 100644 --- ql/src/test/results/clientpositive/decimal_trailing.q.out +++ ql/src/test/results/clientpositive/decimal_trailing.q.out @@ -12,7 +12,7 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_TRAILING_n0 +PREHOOK: Output: default@decimal_trailing_n0 POSTHOOK: query: CREATE TABLE DECIMAL_TRAILING_n0 ( id int, a decimal(10,4), @@ -23,7 +23,7 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_TRAILING_n0 +POSTHOOK: Output: default@decimal_trailing_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv10.txt' INTO TABLE DECIMAL_TRAILING_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_udf.q.out ql/src/test/results/clientpositive/decimal_udf.q.out index a5bbdc68e9..b3bb08e66b 100644 --- ql/src/test/results/clientpositive/decimal_udf.q.out +++ ql/src/test/results/clientpositive/decimal_udf.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF +PREHOOK: Output: default@decimal_udf POSTHOOK: query: CREATE TABLE DECIMAL_UDF (key decimal(20,10), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default 
-POSTHOOK: Output: default@DECIMAL_UDF +POSTHOOK: Output: default@decimal_udf PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/decimal_udf2.q.out ql/src/test/results/clientpositive/decimal_udf2.q.out index eb98a07c8e..364d27039d 100644 --- ql/src/test/results/clientpositive/decimal_udf2.q.out +++ ql/src/test/results/clientpositive/decimal_udf2.q.out @@ -8,14 +8,14 @@ ROW FORMAT DELIMITED STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DECIMAL_UDF2 +PREHOOK: Output: default@decimal_udf2 POSTHOOK: query: CREATE TABLE DECIMAL_UDF2 (key decimal(20,10), value int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DECIMAL_UDF2 +POSTHOOK: Output: default@decimal_udf2 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_UDF2 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out index ca7ce6f0e4..d42434e08d 100644 --- ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out +++ ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@UserVisits_web_text_none_n0 +PREHOOK: Output: default@uservisits_web_text_none_n0 POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( sourceIP string, destURL string, @@ -29,7 +29,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@UserVisits_web_text_none_n0 +POSTHOOK: Output: default@uservisits_web_text_none_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -497,7 +497,7 @@ PREHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile PREHOOK: type: CREATETABLE PREHOOK: Output: database:test -PREHOOK: Output: test@UserVisits_web_text_none_n0 +PREHOOK: Output: test@uservisits_web_text_none_n0 POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( sourceIP string, destURL string, @@ -511,7 +511,7 @@ POSTHOOK: query: CREATE TABLE UserVisits_web_text_none_n0 ( row format delimited fields terminated by '|' stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:test -POSTHOOK: Output: test@UserVisits_web_text_none_n0 +POSTHOOK: Output: test@uservisits_web_text_none_n0 PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/drop_table_with_stats.q.out ql/src/test/results/clientpositive/drop_table_with_stats.q.out index f8290886a7..b8e39db414 100644 --- ql/src/test/results/clientpositive/drop_table_with_stats.q.out +++ ql/src/test/results/clientpositive/drop_table_with_stats.q.out @@ -39,11 +39,11 @@ POSTHOOK: Output: 
tblstatsdb1@testtable PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:tblstatsdb1 -PREHOOK: Output: tblstatsdb1@TestTable1_n0 +PREHOOK: Output: tblstatsdb1@testtable1_n0 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:tblstatsdb1 -POSTHOOK: Output: tblstatsdb1@TestTable1_n0 +POSTHOOK: Output: tblstatsdb1@testtable1_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -65,11 +65,11 @@ POSTHOOK: Output: tblstatsdb1@testtable1_n0 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:tblstatsdb1 -PREHOOK: Output: tblstatsdb1@TESTTABLE2_n0 +PREHOOK: Output: tblstatsdb1@testtable2_n0 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:tblstatsdb1 -POSTHOOK: Output: tblstatsdb1@TESTTABLE2_n0 +POSTHOOK: Output: tblstatsdb1@testtable2_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -134,12 +134,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:tblstatsdb2 PREHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: TBLSTATSDB2@testtable PREHOOK: Output: database:tblstatsdb2 +PREHOOK: Output: tblstatsdb2@testtable POSTHOOK: query: CREATE TABLE IF NOT EXISTS testtable (key STRING, value STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: TBLSTATSDB2@testtable POSTHOOK: Output: database:tblstatsdb2 +POSTHOOK: Output: tblstatsdb2@testtable PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE testtable PREHOOK: type: LOAD #### A masked pattern was here #### @@ -160,12 +160,12 @@ POSTHOOK: Input: tblstatsdb2@testtable POSTHOOK: Output: tblstatsdb2@testtable PREHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: TBLSTATSDB2@TestTable1_n0 PREHOOK: Output: database:tblstatsdb2 +PREHOOK: Output: tblstatsdb2@testtable1_n0 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TestTable1_n0 (key STRING, value STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: TBLSTATSDB2@TestTable1_n0 POSTHOOK: Output: database:tblstatsdb2 +POSTHOOK: Output: tblstatsdb2@testtable1_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TestTable1_n0 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -186,12 +186,12 @@ POSTHOOK: Input: tblstatsdb2@testtable1_n0 POSTHOOK: Output: tblstatsdb2@testtable1_n0 PREHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: TBLSTATSDB2@TESTTABLE2_n0 PREHOOK: Output: database:tblstatsdb2 +PREHOOK: Output: tblstatsdb2@testtable2_n0 POSTHOOK: query: CREATE TABLE IF NOT EXISTS TESTTABLE2_n0 (key STRING, value STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: TBLSTATSDB2@TESTTABLE2_n0 POSTHOOK: Output: database:tblstatsdb2 +POSTHOOK: Output: tblstatsdb2@testtable2_n0 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE TESTTABLE2_n0 PREHOOK: type: LOAD #### A masked pattern was here #### diff --git 
ql/src/test/results/clientpositive/explain_ddl.q.out ql/src/test/results/clientpositive/explain_ddl.q.out index aea46d304f..1022c1e557 100644 --- ql/src/test/results/clientpositive/explain_ddl.q.out +++ ql/src/test/results/clientpositive/explain_ddl.q.out @@ -2,14 +2,14 @@ PREHOOK: query: CREATE VIEW V1_n0 AS SELECT key, value from src PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@V1_n0 +PREHOOK: Output: default@v1_n0 POSTHOOK: query: CREATE VIEW V1_n0 AS SELECT key, value from src POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@V1_n0 -POSTHOOK: Lineage: V1_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: V1_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Output: default@v1_n0 +POSTHOOK: Lineage: v1_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: v1_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select count(*) from V1_n0 where key > 0 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -25,12 +25,12 @@ PREHOOK: query: CREATE TABLE M1 AS SELECT key, value from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@M1 +PREHOOK: Output: default@m1 POSTHOOK: query: CREATE TABLE M1 AS SELECT key, value from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@M1 +POSTHOOK: Output: default@m1 POSTHOOK: Lineage: m1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: m1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select count(*) from M1 where key > 0 @@ -46,12 +46,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@M1 +PREHOOK: Output: default@m1 POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@M1 +POSTHOOK: Output: default@m1 STAGE DEPENDENCIES: Stage-1 is a root stage Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5 @@ -81,7 +81,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.M1 + name: default.m1 Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: col1, col2 @@ -129,10 +129,10 @@ STAGE PLANS: Stage: Stage-8 Create Table columns: key string, value string - name: default.M1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.m1 Stage: Stage-2 Stats Work @@ -140,7 +140,7 @@ STAGE PLANS: Column Stats Desc: Columns: key, value Column Types: string, string - Table: default.M1 + Table: default.m1 Stage: Stage-3 Map Reduce @@ -152,7 +152,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-5
Map Reduce
@@ -164,7 +164,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-6
Move Operator
@@ -176,12 +176,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from M1
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@m1
PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from M1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@m1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
@@ -211,7 +211,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: col1, col2
@@ -259,10 +259,10 @@ STAGE PLANS:
Stage: Stage-8
Create Table
columns: key string, value string
- name: default.M1
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.m1
Stage: Stage-2
Stats Work
@@ -270,7 +270,7 @@ STAGE PLANS:
Column Stats Desc:
Columns: key, value
Column Types: string, string
- Table: default.M1
+ Table: default.m1
Stage: Stage-3
Map Reduce
@@ -282,7 +282,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-5
Map Reduce
@@ -294,7 +294,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-6
Move Operator
@@ -307,13 +307,13 @@ PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Input: default@v1_n0
PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
POSTHOOK: query: EXPLAIN CREATE TABLE M1 AS select * from V1_n0
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Input: default@v1_n0
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
@@ -345,7 +345,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: col1, col2
@@ -393,10 +393,10 @@ STAGE PLANS:
Stage: Stage-8
Create Table
columns: key string, value string
- name: default.M1
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.m1
Stage: Stage-2
Stats Work
@@ -404,7 +404,7 @@ STAGE PLANS:
Column Stats Desc:
Columns: key, value
Column Types: string, string
- Table: default.M1
+ Table: default.m1
Stage: Stage-3
Map Reduce
@@ -416,7 +416,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-5
Map Reduce
@@ -428,7 +428,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.M1
+ name: default.m1
Stage: Stage-6
Move Operator
@@ -440,12 +440,12 @@ PREHOOK: query: EXPLAIN CREATE TABLE V1_n0 AS select * from M1
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@m1
PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n0
+PREHOOK: Output: default@v1_n0
POSTHOOK: query: EXPLAIN CREATE TABLE V1_n0 AS select * from M1
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@m1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n0
+POSTHOOK: Output: default@v1_n0
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
@@ -475,7 +475,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: col1, col2
@@ -523,10 +523,10 @@ STAGE PLANS:
Stage: Stage-8
Create Table
columns: key string, value string
- name: default.V1_n0
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: hive.default.v1_n0
Stage: Stage-2
Stats Work
@@ -534,7 +534,7 @@ STAGE PLANS:
Column Stats Desc:
Columns: key, value
Column Types: string, string
- Table: default.V1_n0
+ Table: default.v1_n0
Stage: Stage-3
Map Reduce
@@ -546,7 +546,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
Stage: Stage-5
Map Reduce
@@ -558,7 +558,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.V1_n0
+ name: default.v1_n0
Stage: Stage-6
Move Operator
@@ -570,12 +570,12 @@ PREHOOK: query: EXPLAIN CREATE VIEW V1_n0 AS select * from M1
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@m1
PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n0
+PREHOOK: Output: default@v1_n0
POSTHOOK: query: EXPLAIN CREATE VIEW V1_n0 AS select * from M1
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@m1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n0
+POSTHOOK: Output: default@v1_n0
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -584,17 +584,17 @@ STAGE PLANS:
Create View
columns: key string, value string
expanded text: select `m1`.`key`, `m1`.`value` from `default`.`M1`
- name: default.V1_n0
+ name: hive.default.v1_n0
original text: select * from M1
PREHOOK: query: EXPLAIN CREATE TABLE M1 LIKE src
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
POSTHOOK: query: EXPLAIN CREATE TABLE M1 LIKE src
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -605,18 +605,18 @@ STAGE PLANS:
default output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
default serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
like: src
- name: default.M1
+ name: hive.default.m1
table properties:
bucketing_version 2
PREHOOK: query: EXPLAIN CREATE TABLE M1 LIKE M1
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@M1
+PREHOOK: Output: default@m1
POSTHOOK: query: EXPLAIN CREATE TABLE M1 LIKE M1
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@M1
+POSTHOOK: Output: default@m1
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -626,8 +626,8 @@ STAGE PLANS:
default input format: org.apache.hadoop.mapred.TextInputFormat
default output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
default serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- like: M1
- name: default.M1
+ like: m1
+ name: hive.default.m1
table properties:
bucketing_version 2
@@ -645,7 +645,7 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-0
Drop Table
- table: M1
+ table: m1
PREHOOK: query: select count(*) from M1 where key > 0
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/explain_dependency.q.out ql/src/test/results/clientpositive/explain_dependency.q.out
index da1036b3c7..a1e9ab8411 100644
--- ql/src/test/results/clientpositive/explain_dependency.q.out
+++ ql/src/test/results/clientpositive/explain_dependency.q.out
@@ -2,27 +2,27 @@ PREHOOK: query: CREATE VIEW V1_n6 AS SELECT key, value from src
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n6
+PREHOOK: Output: default@v1_n6
POSTHOOK: query: CREATE VIEW V1_n6 AS SELECT key, value from src
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n6
-POSTHOOK: Lineage: V1_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V1_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v1_n6
+POSTHOOK: Lineage: v1_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v1_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V2_n1 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@srcpart
PREHOOK: Output: database:default
-PREHOOK: Output: default@V2_n1
+PREHOOK: Output: default@v2_n1
POSTHOOK: query: CREATE VIEW V2_n1 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@srcpart
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V2_n1
-POSTHOOK: Lineage: V2_n1.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
-POSTHOOK: Lineage: V2_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V2_n1.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v2_n1
+POSTHOOK: Lineage: v2_n1.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: v2_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v2_n1.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V3 AS SELECT src1.key, src2.value FROM V2_n1 src1
JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -31,7 +31,7 @@ PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart
PREHOOK: Input: default@v2_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@V3
+PREHOOK: Output: default@v3
POSTHOOK: query: CREATE VIEW V3 AS SELECT src1.key, src2.value FROM V2_n1 src1
JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -40,9 +40,9 @@ POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v2_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V3
-POSTHOOK: Lineage: V3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V3.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v3
+POSTHOOK: Lineage: v3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v3.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V4 AS SELECT src1.key, src2.value as value1, src3.value as value2
FROM V1_n6 src1 JOIN V2_n1 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -52,7 +52,7 @@ PREHOOK: Input: default@srcpart
PREHOOK: Input: default@v1_n6
PREHOOK: Input: default@v2_n1
PREHOOK: Output: database:default
-PREHOOK: Output: default@V4
+PREHOOK: Output: default@v4
POSTHOOK: query: CREATE VIEW V4 AS SELECT src1.key, src2.value as value1, src3.value as value2
FROM V1_n6 src1 JOIN V2_n1 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -62,10 +62,10 @@ POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v1_n6
POSTHOOK: Input: default@v2_n1
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V4
-POSTHOOK: Lineage: V4.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V4.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: V4.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v4
+POSTHOOK: Lineage: v4.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v4.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: v4.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: EXPLAIN DEPENDENCY
SELECT key, count(1) FROM srcpart WHERE ds IS NOT NULL GROUP BY key
PREHOOK: type: QUERY
@@ -224,16 +224,16 @@ PREHOOK: query: CREATE VIEW V5 as SELECT * FROM srcpart where ds = '10'
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@srcpart
PREHOOK: Output: database:default
-PREHOOK: Output: default@V5
+PREHOOK: Output: default@v5
POSTHOOK: query: CREATE VIEW V5 as SELECT * FROM srcpart where ds = '10'
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@srcpart
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V5
-POSTHOOK: Lineage: V5.ds SIMPLE []
-POSTHOOK: Lineage: V5.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
-POSTHOOK: Lineage: V5.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V5.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v5
+POSTHOOK: Lineage: v5.ds SIMPLE []
+POSTHOOK: Lineage: v5.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: v5.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v5.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: EXPLAIN DEPENDENCY SELECT * FROM V5
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart
diff --git ql/src/test/results/clientpositive/explain_logical.q.out ql/src/test/results/clientpositive/explain_logical.q.out
index 56c47d6025..f33f03eaf7 100644
--- ql/src/test/results/clientpositive/explain_logical.q.out
+++ ql/src/test/results/clientpositive/explain_logical.q.out
@@ -2,27 +2,27 @@ PREHOOK: query: CREATE VIEW V1_n8 AS SELECT key, value from src
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
PREHOOK: Output: database:default
-PREHOOK: Output: default@V1_n8
+PREHOOK: Output: default@v1_n8
POSTHOOK: query: CREATE VIEW V1_n8 AS SELECT key, value from src
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V1_n8
-POSTHOOK: Lineage: V1_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V1_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v1_n8
+POSTHOOK: Lineage: v1_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v1_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V2_n3 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@srcpart
PREHOOK: Output: database:default
-PREHOOK: Output: default@V2_n3
+PREHOOK: Output: default@v2_n3
POSTHOOK: query: CREATE VIEW V2_n3 AS SELECT ds, key, value FROM srcpart WHERE ds IS NOT NULL
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@srcpart
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V2_n3
-POSTHOOK: Lineage: V2_n3.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
-POSTHOOK: Lineage: V2_n3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V2_n3.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v2_n3
+POSTHOOK: Lineage: v2_n3.ds SIMPLE [(srcpart)srcpart.FieldSchema(name:ds, type:string, comment:null), ]
+POSTHOOK: Lineage: v2_n3.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v2_n3.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V3_n1 AS SELECT src1.key, src2.value FROM V2_n3 src1
JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -31,7 +31,7 @@ PREHOOK: Input: default@src
PREHOOK: Input: default@srcpart
PREHOOK: Input: default@v2_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@V3_n1
+PREHOOK: Output: default@v3_n1
POSTHOOK: query: CREATE VIEW V3_n1 AS SELECT src1.key, src2.value FROM V2_n3 src1
JOIN src src2 ON src1.key = src2.key WHERE src1.ds IS NOT NULL
@@ -40,9 +40,9 @@ POSTHOOK: Input: default@src
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v2_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V3_n1
-POSTHOOK: Lineage: V3_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V3_n1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v3_n1
+POSTHOOK: Lineage: v3_n1.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v3_n1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: CREATE VIEW V4_n1 AS SELECT src1.key, src2.value as value1, src3.value as value2
FROM V1_n8 src1 JOIN V2_n3 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -52,7 +52,7 @@ PREHOOK: Input: default@srcpart
PREHOOK: Input: default@v1_n8
PREHOOK: Input: default@v2_n3
PREHOOK: Output: database:default
-PREHOOK: Output: default@V4_n1
+PREHOOK: Output: default@v4_n1
POSTHOOK: query: CREATE VIEW V4_n1 AS SELECT src1.key, src2.value as value1, src3.value as value2
FROM V1_n8 src1 JOIN V2_n3 src2 on src1.key = src2.key JOIN src src3 ON src2.key = src3.key
@@ -62,10 +62,10 @@ POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v1_n8
POSTHOOK: Input: default@v2_n3
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V4_n1
-POSTHOOK: Lineage: V4_n1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V4_n1.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: V4_n1.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v4_n1
+POSTHOOK: Lineage: v4_n1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v4_n1.value1 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: v4_n1.value2 SIMPLE [(src)src3.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: EXPLAIN LOGICAL
SELECT key, count(1) FROM srcpart WHERE ds IS NOT NULL GROUP BY key
PREHOOK: type: QUERY
@@ -558,16 +558,16 @@ PREHOOK: query: CREATE VIEW V5_n0 as SELECT * FROM srcpart where ds = '10'
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@srcpart
PREHOOK: Output: database:default
-PREHOOK: Output: default@V5_n0
+PREHOOK: Output: default@v5_n0
POSTHOOK: query: CREATE VIEW V5_n0 as SELECT * FROM srcpart where ds = '10'
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@srcpart
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@V5_n0
-POSTHOOK: Lineage: V5_n0.ds SIMPLE []
-POSTHOOK: Lineage: V5_n0.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
-POSTHOOK: Lineage: V5_n0.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: V5_n0.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@v5_n0
+POSTHOOK: Lineage: v5_n0.ds SIMPLE []
+POSTHOOK: Lineage: v5_n0.hr SIMPLE [(srcpart)srcpart.FieldSchema(name:hr, type:string, comment:null), ]
+POSTHOOK: Lineage: v5_n0.key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: v5_n0.value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: EXPLAIN LOGICAL SELECT * FROM V5_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart
diff --git ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
index 54f28df314..7698d04357 100644
--- ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
+++ ql/src/test/results/clientpositive/filter_cond_pushdown_HIVE_15647.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE sales_HIVE_15647 (store_id INTEGER, store_number INTEGER, customer_id INTEGER)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@sales_HIVE_15647
+PREHOOK: Output: default@sales_hive_15647
POSTHOOK: query: CREATE TABLE sales_HIVE_15647 (store_id INTEGER, store_number INTEGER, customer_id INTEGER)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@sales_HIVE_15647
+POSTHOOK: Output: default@sales_hive_15647
PREHOOK: query: CREATE TABLE store_HIVE_15647 (store_id INTEGER, salad_bar BOOLEAN)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@store_HIVE_15647
+PREHOOK: Output: default@store_hive_15647
POSTHOOK: query: CREATE TABLE store_HIVE_15647 (store_id INTEGER, salad_bar BOOLEAN)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@store_HIVE_15647
+POSTHOOK: Output: default@store_hive_15647
PREHOOK: query: explain select count(*)
from sales_HIVE_15647 as sales
join store_HIVE_15647 as store on sales.store_id = store.store_id
diff --git ql/src/test/results/clientpositive/filter_join_breaktask2.q.out ql/src/test/results/clientpositive/filter_join_breaktask2.q.out
index eab45b9315..72dd105f81 100644
--- ql/src/test/results/clientpositive/filter_join_breaktask2.q.out
+++ ql/src/test/results/clientpositive/filter_join_breaktask2.q.out
@@ -2,36 +2,36 @@ PREHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string,
partitioned by (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n85
+PREHOOK: Output: default@t1_n85
POSTHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string)
partitioned by (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n85
+POSTHOOK: Output: default@t1_n85
PREHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string)
partitioned by (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n53
+PREHOOK: Output: default@t2_n53
POSTHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string)
partitioned by (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n53
+POSTHOOK: Output: default@t2_n53
PREHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int)
partitioned by (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n18
+PREHOOK: Output: default@t3_n18
POSTHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int)
partitioned by (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n18
+POSTHOOK: Output: default@t3_n18
PREHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint)
partitioned by (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T4_n8
+PREHOOK: Output: default@t4_n8
POSTHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint)
partitioned by (ds string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T4_n8
+POSTHOOK: Output: default@t4_n8
PREHOOK: query: insert overwrite table T1_n85 partition (ds='2010-04-17')
select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
diff --git ql/src/test/results/clientpositive/groupby10.q.out ql/src/test/results/clientpositive/groupby10.q.out
index 583f2fbc28..cd33bdba5c 100644
--- ql/src/test/results/clientpositive/groupby10.q.out
+++ ql/src/test/results/clientpositive/groupby10.q.out
@@ -17,11 +17,11 @@ POSTHOOK: Output: default@dest2
PREHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT
+PREHOOK: Output: default@input
POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT
+POSTHOOK: Output: default@input
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby13.q.out ql/src/test/results/clientpositive/groupby13.q.out
index a32a654afa..14a3dd57c2 100644
--- ql/src/test/results/clientpositive/groupby13.q.out
+++ ql/src/test/results/clientpositive/groupby13.q.out
@@ -94,11 +94,11 @@ STAGE PLANS:
PREHOOK: query: create table aGBY (i int, j string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@aGBY
+PREHOOK: Output: default@agby
POSTHOOK: query: create table aGBY (i int, j string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@aGBY
+POSTHOOK: Output: default@agby
PREHOOK: query: insert into aGBY values ( 1, 'a'),(2,'b')
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
diff --git ql/src/test/results/clientpositive/groupby7.q.out ql/src/test/results/clientpositive/groupby7.q.out
index 3dd3a5e588..67b6ed1877 100644
--- ql/src/test/results/clientpositive/groupby7.q.out
+++ ql/src/test/results/clientpositive/groupby7.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n132
+PREHOOK: Output: default@dest1_n132
POSTHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n132
+POSTHOOK: Output: default@dest1_n132
PREHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n34
+PREHOOK: Output: default@dest2_n34
POSTHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n34
+POSTHOOK: Output: default@dest2_n34
PREHOOK: query: FROM SRC
INSERT OVERWRITE TABLE DEST1_n132 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
INSERT OVERWRITE TABLE DEST2_n34 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_map.q.out ql/src/test/results/clientpositive/groupby7_map.q.out
index 08b0db51e1..e54d256141 100644
--- ql/src/test/results/clientpositive/groupby7_map.q.out
+++ ql/src/test/results/clientpositive/groupby7_map.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n82
+PREHOOK: Output: default@dest1_n82
POSTHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n82
+POSTHOOK: Output: default@dest1_n82
PREHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n19
+PREHOOK: Output: default@dest2_n19
POSTHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n19
+POSTHOOK: Output: default@dest2_n19
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n82 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
index e17ef4e8da..f58cbb9fca 100644
--- ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n15
+PREHOOK: Output: default@dest1_n15
POSTHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n15
+POSTHOOK: Output: default@dest1_n15
PREHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n3
+PREHOOK: Output: default@dest2_n3
POSTHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n3
+POSTHOOK: Output: default@dest2_n3
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n15 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_map_skew.q.out ql/src/test/results/clientpositive/groupby7_map_skew.q.out
index 4a06aa7830..a297d91a38 100644
--- ql/src/test/results/clientpositive/groupby7_map_skew.q.out
+++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n21(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n21
+PREHOOK: Output: default@dest1_n21
POSTHOOK: query: CREATE TABLE DEST1_n21(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n21
+POSTHOOK: Output: default@dest1_n21
PREHOOK: query: CREATE TABLE DEST2_n5(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n5
+PREHOOK: Output: default@dest2_n5
POSTHOOK: query: CREATE TABLE DEST2_n5(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n5
+POSTHOOK: Output: default@dest2_n5
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n21 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_noskew.q.out ql/src/test/results/clientpositive/groupby7_noskew.q.out
index d60d67e0a8..1aea59622a 100644
--- ql/src/test/results/clientpositive/groupby7_noskew.q.out
+++ ql/src/test/results/clientpositive/groupby7_noskew.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n101(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n101
+PREHOOK: Output: default@dest1_n101
POSTHOOK: query: CREATE TABLE DEST1_n101(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n101
+POSTHOOK: Output: default@dest1_n101
PREHOOK: query: CREATE TABLE DEST2_n28(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n28
+PREHOOK: Output: default@dest2_n28
POSTHOOK: query: CREATE TABLE DEST2_n28(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n28
+POSTHOOK: Output: default@dest2_n28
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n101 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
index 45a2afcebc..6ffff4469b 100644
--- ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby7_noskew_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n170(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n170
+PREHOOK: Output: default@dest1_n170
POSTHOOK: query: CREATE TABLE DEST1_n170(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n170
+POSTHOOK: Output: default@dest1_n170
PREHOOK: query: CREATE TABLE DEST2_n42(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n42
+PREHOOK: Output: default@dest2_n42
POSTHOOK: query: CREATE TABLE DEST2_n42(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n42
+POSTHOOK: Output: default@dest2_n42
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n170 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key ORDER BY SRC.key limit 10
diff --git ql/src/test/results/clientpositive/groupby8.q.out ql/src/test/results/clientpositive/groupby8.q.out
index 59a2334014..302588d70d 100644
--- ql/src/test/results/clientpositive/groupby8.q.out
+++ ql/src/test/results/clientpositive/groupby8.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n71(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n71
+PREHOOK: Output: default@dest1_n71
POSTHOOK: query: CREATE TABLE DEST1_n71(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n71
+POSTHOOK: Output: default@dest1_n71
PREHOOK: query: CREATE TABLE DEST2_n15(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n15
+PREHOOK: Output: default@dest2_n15
POSTHOOK: query: CREATE TABLE DEST2_n15(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n15
+POSTHOOK: Output: default@dest2_n15
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n71 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_map.q.out ql/src/test/results/clientpositive/groupby8_map.q.out
index c004bcaf13..62f9247a74 100644
--- ql/src/test/results/clientpositive/groupby8_map.q.out
+++ ql/src/test/results/clientpositive/groupby8_map.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n136(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n136
+PREHOOK: Output: default@dest1_n136
POSTHOOK: query: CREATE TABLE DEST1_n136(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n136
+POSTHOOK: Output: default@dest1_n136
PREHOOK: query: CREATE TABLE DEST2_n35(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n35
+PREHOOK: Output: default@dest2_n35
POSTHOOK: query: CREATE TABLE DEST2_n35(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n35
+POSTHOOK: Output: default@dest2_n35
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n136 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_map_skew.q.out ql/src/test/results/clientpositive/groupby8_map_skew.q.out
index 819196315b..f607f88616 100644
--- ql/src/test/results/clientpositive/groupby8_map_skew.q.out
+++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n87(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n87
+PREHOOK: Output: default@dest1_n87
POSTHOOK: query: CREATE TABLE DEST1_n87(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n87
+POSTHOOK: Output: default@dest1_n87
PREHOOK: query: CREATE TABLE DEST2_n22(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n22
+PREHOOK: Output: default@dest2_n22
POSTHOOK: query: CREATE TABLE DEST2_n22(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n22
+POSTHOOK: Output: default@dest2_n22
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n87 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby8_noskew.q.out ql/src/test/results/clientpositive/groupby8_noskew.q.out
index fdc5a60438..3071bacfc4 100644
--- ql/src/test/results/clientpositive/groupby8_noskew.q.out
+++ ql/src/test/results/clientpositive/groupby8_noskew.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n48(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n48
+PREHOOK: Output: default@dest1_n48
POSTHOOK: query: CREATE TABLE DEST1_n48(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n48
+POSTHOOK: Output: default@dest1_n48
PREHOOK: query: CREATE TABLE DEST2_n9(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n9
+PREHOOK: Output: default@dest2_n9
POSTHOOK: query: CREATE TABLE DEST2_n9(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n9
+POSTHOOK: Output: default@dest2_n9
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n48 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby9.q.out ql/src/test/results/clientpositive/groupby9.q.out
index d46ca78e96..249561b3d1 100644
--- ql/src/test/results/clientpositive/groupby9.q.out
+++ ql/src/test/results/clientpositive/groupby9.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n117(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n117
+PREHOOK: Output: default@dest1_n117
POSTHOOK: query: CREATE TABLE DEST1_n117(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n117
+POSTHOOK: Output: default@dest1_n117
PREHOOK: query: CREATE TABLE DEST2_n31(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n31
+PREHOOK: Output: default@dest2_n31
POSTHOOK: query: CREATE TABLE DEST2_n31(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n31
+POSTHOOK: Output: default@dest2_n31
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n117 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key
diff --git ql/src/test/results/clientpositive/groupby_complex_types.q.out ql/src/test/results/clientpositive/groupby_complex_types.q.out
index e784a5e04a..d3cb3400f6 100644
--- ql/src/test/results/clientpositive/groupby_complex_types.q.out
+++ ql/src/test/results/clientpositive/groupby_complex_types.q.out
@@ -1,27 +1,27 @@
PREHOOK: query: CREATE TABLE DEST1_n163(key ARRAY, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n163
+PREHOOK: Output: default@dest1_n163
POSTHOOK: query: CREATE TABLE DEST1_n163(key ARRAY, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n163
+POSTHOOK: Output: default@dest1_n163
PREHOOK: query: CREATE TABLE DEST2_n41(key MAP, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n41
+PREHOOK: Output: default@dest2_n41
POSTHOOK: query: CREATE TABLE DEST2_n41(key MAP, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n41
+POSTHOOK: Output: default@dest2_n41
PREHOOK: query: CREATE TABLE DEST3_n7(key STRUCT, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST3_n7
+PREHOOK: Output: default@dest3_n7
POSTHOOK: query: CREATE TABLE DEST3_n7(key STRUCT, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST3_n7
+POSTHOOK: Output: default@dest3_n7
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n163 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key)
diff --git ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
index dd2ea4a357..8053e4ad2c 100644
--- ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
+++ ql/src/test/results/clientpositive/groupby_complex_types_multi_single_reducer.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE DEST1_n47(key ARRAY, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n47
+PREHOOK: Output: default@dest1_n47
POSTHOOK: query: CREATE TABLE DEST1_n47(key ARRAY, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n47
+POSTHOOK: Output: default@dest1_n47
PREHOOK: query: CREATE TABLE DEST2_n8(key MAP, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n8
+PREHOOK: Output: default@dest2_n8
POSTHOOK: query: CREATE TABLE DEST2_n8(key MAP, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n8
+POSTHOOK: Output: default@dest2_n8
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1_n47 SELECT ARRAY(SRC.key) as keyarray, COUNT(1) GROUP BY ARRAY(SRC.key) ORDER BY keyarray limit 10
diff --git ql/src/test/results/clientpositive/groupby_cube1.q.out ql/src/test/results/clientpositive/groupby_cube1.q.out
index 0ac1490e34..f47319e323 100644
--- ql/src/test/results/clientpositive/groupby_cube1.q.out
+++ ql/src/test/results/clientpositive/groupby_cube1.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n82(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n82
+PREHOOK: Output: default@t1_n82
POSTHOOK: query: CREATE TABLE T1_n82(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n82
+POSTHOOK: Output: default@t1_n82
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n82
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -562,19 +562,19 @@ NULL 6
PREHOOK: query: CREATE TABLE T2_n51(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n51
+PREHOOK: Output: default@t2_n51
POSTHOOK: query: CREATE TABLE T2_n51(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n51
+POSTHOOK: Output: default@t2_n51
PREHOOK: query: CREATE TABLE T3_n16(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n16
+PREHOOK: Output: default@t3_n16
POSTHOOK: query: CREATE TABLE T3_n16(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n16
+POSTHOOK: Output: default@t3_n16
PREHOOK: query: EXPLAIN
FROM T1_n82
INSERT OVERWRITE TABLE T2_n51 SELECT key, val, count(1) group by key, val with cube
diff --git ql/src/test/results/clientpositive/groupby_grouping_id1.q.out ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
index 71fe6c9612..8fc1b7ac0a 100644
--- ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_id1.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n158(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n158
+PREHOOK: Output: default@t1_n158
POSTHOOK: query: CREATE TABLE T1_n158(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n158
+POSTHOOK: Output: default@t1_n158
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n158
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_id3.q.out ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
index cdc063b370..aa122cd4f4 100644
--- ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_id3.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n86(key INT, value INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n86
+PREHOOK: Output: default@t1_n86
POSTHOOK: query: CREATE TABLE T1_n86(key INT, value INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n86
+POSTHOOK: Output: default@t1_n86
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_n86
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
index 43ab99b9f1..4f21e790d0 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets1.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n41(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n41
+PREHOOK: Output: default@t1_n41
POSTHOOK: query: CREATE TABLE T1_n41(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n41
+POSTHOOK: Output: default@t1_n41
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n41
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
index 7831a49e95..1066792a64 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n81(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n81
+PREHOOK: Output: default@t1_n81
POSTHOOK: query: CREATE TABLE T1_n81(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n81
+POSTHOOK: Output: default@t1_n81
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n81
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -341,11 +341,11 @@ NULL NULL 23.0
PREHOOK: query: CREATE TABLE T2_n50(a STRING, b STRING, c int, d int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n50
+PREHOOK: Output: default@t2_n50
POSTHOOK: query: CREATE TABLE T2_n50(a STRING, b STRING, c int, d int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n50
+POSTHOOK: Output: default@t2_n50
PREHOOK: query: INSERT OVERWRITE TABLE T2_n50 SELECT a, b, c, c from T1_n81
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
index a08dd02490..e29ab4f91a 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n118(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n118
+PREHOOK: Output: default@t1_n118
POSTHOOK: query: CREATE TABLE T1_n118(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n118
+POSTHOOK: Output: default@t1_n118
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets1.txt' INTO TABLE T1_n118
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
index b61aba926d..9ca2fb196f 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n143(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n143
+PREHOOK: Output: default@t1_n143
POSTHOOK: query: CREATE TABLE T1_n143(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n143
+POSTHOOK: Output: default@t1_n143
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n143
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
index b6b4dcb339..6c57bd2d3f 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n24(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n24
+PREHOOK: Output: default@t1_n24
POSTHOOK: query: CREATE TABLE T1_n24(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n24
+POSTHOOK: Output: default@t1_n24
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n24
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
index f6571b4645..4926414194 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n75(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n75
+PREHOOK: Output: default@t1_n75
POSTHOOK: query: CREATE TABLE T1_n75(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n75
+POSTHOOK: Output: default@t1_n75
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n75
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
index 93e081b729..5179ec8125 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n64(key INT, value INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n64
+PREHOOK: Output: default@t1_n64
POSTHOOK: query: CREATE TABLE T1_n64(key INT, value INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n64
+POSTHOOK: Output: default@t1_n64
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1_n64
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
index b4aa6d1dd0..0c99a03a77 100644
--- ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
+++ ql/src/test/results/clientpositive/groupby_grouping_sets_limit.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n141(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n141
+PREHOOK: Output: default@t1_n141
POSTHOOK: query: CREATE TABLE T1_n141(a STRING, b STRING, c STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ' STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n141
+POSTHOOK: Output: default@t1_n141
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/grouping_sets.txt' INTO TABLE T1_n141
PREHOOK: type: LOAD
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
index 19ae1380fd..fe9bae674a 100644
--- ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
+++ ql/src/test/results/clientpositive/groupby_nocolumnalign.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE DEST2_n31_2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n31_2
+PREHOOK: Output: default@dest2_n31_2
POSTHOOK: query: CREATE TABLE DEST2_n31_2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n31_2
+POSTHOOK: Output: default@dest2_n31_2
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST2_n31_2 SELECT SRC.key, SRC.value, COUNT(DISTINCT SUBSTR(SRC.value,5)) GROUP BY SRC.key, SRC.value
diff --git ql/src/test/results/clientpositive/groupby_position.q.out ql/src/test/results/clientpositive/groupby_position.q.out
index f52623a534..cb1b95a381 100644
--- ql/src/test/results/clientpositive/groupby_position.q.out
+++ ql/src/test/results/clientpositive/groupby_position.q.out
@@ -1,19 +1,19 @@
PREHOOK: query: CREATE TABLE testTable1(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testTable1
+PREHOOK: Output: default@testtable1
POSTHOOK: query: CREATE TABLE testTable1(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testTable1
+POSTHOOK: Output: default@testtable1
PREHOOK: query: CREATE TABLE testTable2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@testTable2
+PREHOOK: Output: default@testtable2
POSTHOOK: query: CREATE TABLE testTable2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@testTable2
+POSTHOOK: Output: default@testtable2
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE testTable1 SELECT SRC.key, COUNT(DISTINCT SUBSTR(SRC.value,5)) WHERE SRC.key < 20 GROUP BY 1
diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out
index e7b61b4a33..efc0533799 100644
--- ql/src/test/results/clientpositive/groupby_rollup1.q.out
+++ ql/src/test/results/clientpositive/groupby_rollup1.q.out
@@ -1,11 +1,11 @@
PREHOOK: query: CREATE TABLE T1_n91(key STRING, val STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n91
+PREHOOK: Output: default@t1_n91
POSTHOOK: query: CREATE TABLE T1_n91(key STRING, val STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n91
+POSTHOOK: Output: default@t1_n91
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n91
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -395,19 +395,19 @@ NULL 6
PREHOOK: query: CREATE TABLE T2_n56(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n56
+PREHOOK: Output: default@t2_n56
POSTHOOK: query: CREATE TABLE T2_n56(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n56
+POSTHOOK: Output: default@t2_n56
PREHOOK: query: CREATE TABLE T3_n20(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T3_n20
+PREHOOK: Output: default@t3_n20
POSTHOOK: query: CREATE TABLE T3_n20(key1 STRING, key2 STRING, val INT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T3_n20
+POSTHOOK: Output: default@t3_n20
PREHOOK: query: EXPLAIN
FROM T1_n91
INSERT OVERWRITE TABLE T2_n56 SELECT key, val, count(1) group by key, val with rollup
diff --git ql/src/test/results/clientpositive/groupby_sort_10.q.out ql/src/test/results/clientpositive/groupby_sort_10.q.out
index 570d3eeeaf..227238c7f0 100644
--- ql/src/test/results/clientpositive/groupby_sort_10.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_10.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n149(key STRING, val STRING) PARTITIONED BY (ds
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n149
+PREHOOK: Output: default@t1_n149
POSTHOOK: query: CREATE TABLE T1_n149(key STRING, val STRING) PARTITIONED BY (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n149
+POSTHOOK: Output: default@t1_n149
PREHOOK: query: INSERT OVERWRITE TABLE T1_n149 PARTITION (ds='1')
SELECT * from src where key = 0 or key = 11
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_sort_11.q.out ql/src/test/results/clientpositive/groupby_sort_11.q.out
index 76d3c7c51a..fa7b149bed 100644
--- ql/src/test/results/clientpositive/groupby_sort_11.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_11.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n18(key STRING, val STRING) PARTITIONED BY (ds s
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n18
+PREHOOK: Output: default@t1_n18
POSTHOOK: query: CREATE TABLE T1_n18(key STRING, val STRING) PARTITIONED BY (ds string)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n18
+POSTHOOK: Output: default@t1_n18
PREHOOK: query: INSERT OVERWRITE TABLE T1_n18 PARTITION (ds='1')
SELECT * from src where key < 10
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/groupby_sort_1_23.q.out ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index 6498e2422d..3643232133 100644
--- ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n80(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n80
+PREHOOK: Output: default@t1_n80
POSTHOOK: query: CREATE TABLE T1_n80(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n80
+POSTHOOK: Output: default@t1_n80
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n80
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n80.val SIMPLE [(t1_n80)t1_n80.FieldSchema(name:val, type:
PREHOOK: query: CREATE TABLE outputTbl1_n18(key int, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n18
+PREHOOK: Output: default@outputtbl1_n18
POSTHOOK: query: CREATE TABLE outputTbl1_n18(key int, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n18
+POSTHOOK: Output: default@outputtbl1_n18
PREHOOK: query: EXPLAIN EXTENDED
INSERT OVERWRITE TABLE outputTbl1_n18
SELECT key, count(1) FROM T1_n80 GROUP BY key
@@ -480,11 +480,11 @@ POSTHOOK: Input: default@outputtbl1_n18
PREHOOK: query: CREATE TABLE outputTbl2_n5(key1 int, key2 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl2_n5
+PREHOOK: Output: default@outputtbl2_n5
POSTHOOK: query: CREATE TABLE outputTbl2_n5(key1 int, key2 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl2_n5
+POSTHOOK: Output: default@outputtbl2_n5
PREHOOK: query: EXPLAIN EXTENDED
INSERT OVERWRITE TABLE outputTbl2_n5
SELECT key, val, count(1) FROM T1_n80 GROUP BY key, val
@@ -1691,11 +1691,11 @@ POSTHOOK: Input: default@outputtbl1_n18
PREHOOK: query: CREATE TABLE outputTbl3_n2(key1 int, key2 int, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl3_n2
+PREHOOK: Output: default@outputtbl3_n2
POSTHOOK: query: CREATE TABLE outputTbl3_n2(key1 int, key2 int, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl3_n2
+POSTHOOK: Output: default@outputtbl3_n2
PREHOOK: query: EXPLAIN EXTENDED
INSERT OVERWRITE TABLE outputTbl3_n2
SELECT 1, key, count(1) FROM T1_n80 GROUP BY 1, key
@@ -2143,11 +2143,11 @@ POSTHOOK: Input: default@outputtbl3_n2
PREHOOK: query: CREATE TABLE outputTbl4_n2(key1 int, key2 int, key3 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl4_n2
+PREHOOK: Output: default@outputtbl4_n2
POSTHOOK: query: CREATE TABLE outputTbl4_n2(key1 int, key2 int, key3 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl4_n2
+POSTHOOK: Output: default@outputtbl4_n2
PREHOOK: query: EXPLAIN EXTENDED
INSERT OVERWRITE TABLE outputTbl4_n2
SELECT key, 1, val, count(1) FROM T1_n80 GROUP BY key, 1, val
@@ -4992,12 +4992,12 @@ PREHOOK: query: CREATE TABLE T2_n49(key STRING, val STRING)
CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T2_n49
+PREHOOK: Output: default@t2_n49
POSTHOOK: query: CREATE TABLE T2_n49(key STRING, val STRING)
CLUSTERED BY (key, val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T2_n49
+POSTHOOK: Output: default@t2_n49
PREHOOK: query: INSERT OVERWRITE TABLE T2_n49 select key, val from T1_n80
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n80
@@ -5773,11 +5773,11 @@ POSTHOOK: Input: default@outputtbl4_n2
PREHOOK: query: CREATE TABLE outputTbl5_n2(key1 int, key2 int, key3 string, key4 int, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl5_n2
+PREHOOK: Output: default@outputtbl5_n2
POSTHOOK: query: CREATE TABLE outputTbl5_n2(key1 int, key2 int, key3 string, key4 int, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl5_n2
+POSTHOOK: Output: default@outputtbl5_n2
PREHOOK: query: EXPLAIN EXTENDED
INSERT OVERWRITE TABLE outputTbl5_n2
SELECT key, 1, val, 2, count(1) FROM T2_n49 GROUP BY key, 1, val, 2
@@ -7150,19 +7150,19 @@ POSTHOOK: Input: default@outputtbl4_n2
PREHOOK: query: CREATE TABLE DEST1_n80(key INT, cnt INT)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST1_n80
+PREHOOK: Output: default@dest1_n80
POSTHOOK: query: CREATE TABLE DEST1_n80(key INT, cnt INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST1_n80
+POSTHOOK: Output: default@dest1_n80
PREHOOK: query: CREATE TABLE DEST2_n18(key INT, val STRING, cnt INT)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@DEST2_n18
+PREHOOK: Output: default@dest2_n18
POSTHOOK: query: CREATE TABLE DEST2_n18(key INT, val STRING, cnt INT)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@DEST2_n18
+POSTHOOK: Output: default@dest2_n18
PREHOOK: query: EXPLAIN
FROM T2_n49
INSERT OVERWRITE TABLE DEST1_n80 SELECT key, count(1) GROUP BY key
diff --git ql/src/test/results/clientpositive/groupby_sort_2.q.out ql/src/test/results/clientpositive/groupby_sort_2.q.out
index a6b2403f47..1c1eef927e 100644
--- ql/src/test/results/clientpositive/groupby_sort_2.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_2.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n51(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n51
+PREHOOK: Output: default@t1_n51
POSTHOOK: query: CREATE TABLE T1_n51(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n51
+POSTHOOK: Output: default@t1_n51
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n51
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n51.val SIMPLE [(t1_n51)t1_n51.FieldSchema(name:val, type:
PREHOOK: query: CREATE TABLE outputTbl1_n10(val string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n10
+PREHOOK: Output: default@outputtbl1_n10
POSTHOOK: query: CREATE TABLE outputTbl1_n10(val string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n10
+POSTHOOK: Output: default@outputtbl1_n10
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE outputTbl1_n10
SELECT val, count(1) FROM T1_n51 GROUP BY val
diff --git ql/src/test/results/clientpositive/groupby_sort_3.q.out ql/src/test/results/clientpositive/groupby_sort_3.q.out
index e657a28396..e979e85300 100644
--- ql/src/test/results/clientpositive/groupby_sort_3.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_3.q.out
@@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n89(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@T1_n89
+PREHOOK: Output: default@t1_n89
POSTHOOK: query: CREATE TABLE T1_n89(key STRING, val STRING) CLUSTERED BY (key)
SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@T1_n89
+POSTHOOK: Output: default@t1_n89
PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n89
PREHOOK: type: LOAD
#### A masked pattern was here ####
@@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n89.val SIMPLE [(t1_n89)t1_n89.FieldSchema(name:val, type:
PREHOOK: query: CREATE TABLE outputTbl1_n20(key string, val string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
-PREHOOK: Output: default@outputTbl1_n20
+PREHOOK: Output: default@outputtbl1_n20
POSTHOOK: query: CREATE TABLE outputTbl1_n20(key string, val string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
-POSTHOOK: Output: default@outputTbl1_n20
+POSTHOOK: Output: default@outputtbl1_n20
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE outputTbl1_n20
SELECT key, val, count(1) FROM T1_n89 GROUP BY key, val
@@ -201,11 +201,11 @@ POSTHOOK: Input:
default@outputtbl1_n20 PREHOOK: query: CREATE TABLE outputTbl2_n7(key string, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n7 +PREHOOK: Output: default@outputtbl2_n7 POSTHOOK: query: CREATE TABLE outputTbl2_n7(key string, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n7 +POSTHOOK: Output: default@outputtbl2_n7 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n7 SELECT key, count(1) FROM T1_n89 GROUP BY key diff --git ql/src/test/results/clientpositive/groupby_sort_4.q.out ql/src/test/results/clientpositive/groupby_sort_4.q.out index cadc717f68..3dc93ee96a 100644 --- ql/src/test/results/clientpositive/groupby_sort_4.q.out +++ ql/src/test/results/clientpositive/groupby_sort_4.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n133(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n133 +PREHOOK: Output: default@t1_n133 POSTHOOK: query: CREATE TABLE T1_n133(key STRING, val STRING) CLUSTERED BY (key, val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n133 +POSTHOOK: Output: default@t1_n133 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n133 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n133.val SIMPLE [(t1_n133)t1_n133.FieldSchema(name:val, ty PREHOOK: query: CREATE TABLE outputTbl1_n31(key STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n31 +PREHOOK: Output: default@outputtbl1_n31 POSTHOOK: query: CREATE TABLE outputTbl1_n31(key STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n31 +POSTHOOK: Output: default@outputtbl1_n31 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n31 SELECT key, count(1) FROM T1_n133 GROUP BY key @@ -185,11 +185,11 @@ POSTHOOK: Input: default@outputtbl1_n31 PREHOOK: query: CREATE TABLE outputTbl2_n8(key STRING, val STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n8 +PREHOOK: Output: default@outputtbl2_n8 POSTHOOK: query: CREATE TABLE outputTbl2_n8(key STRING, val STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n8 +POSTHOOK: Output: default@outputtbl2_n8 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n8 SELECT key, val, count(1) FROM T1_n133 GROUP BY key, val diff --git ql/src/test/results/clientpositive/groupby_sort_5.q.out ql/src/test/results/clientpositive/groupby_sort_5.q.out index 90312062f9..215c47fefb 100644 --- ql/src/test/results/clientpositive/groupby_sort_5.q.out +++ ql/src/test/results/clientpositive/groupby_sort_5.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n6 +PREHOOK: Output: default@t1_n6 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: 
default@T1_n6 +POSTHOOK: Output: default@t1_n6 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -29,11 +29,11 @@ POSTHOOK: Lineage: t1_n6.val SIMPLE [(t1_n6)t1_n6.FieldSchema(name:val, type:str PREHOOK: query: CREATE TABLE outputTbl1_n5(key STRING, val STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n5 +PREHOOK: Output: default@outputtbl1_n5 POSTHOOK: query: CREATE TABLE outputTbl1_n5(key STRING, val STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n5 +POSTHOOK: Output: default@outputtbl1_n5 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n5 SELECT key, val, count(1) FROM T1_n6 GROUP BY key, val @@ -210,12 +210,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val, key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n6 +PREHOOK: Output: default@t1_n6 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val, key) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n6 +POSTHOOK: Output: default@t1_n6 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -410,12 +410,12 @@ PREHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n6 +PREHOOK: Output: default@t1_n6 POSTHOOK: query: CREATE TABLE T1_n6(key STRING, val STRING) CLUSTERED BY (val) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n6 +POSTHOOK: Output: default@t1_n6 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n6 PREHOOK: type: LOAD #### A masked pattern was here #### @@ -437,11 +437,11 @@ POSTHOOK: Lineage: t1_n6.val SIMPLE [(t1_n6)t1_n6.FieldSchema(name:val, type:str PREHOOK: query: CREATE TABLE outputTbl2_n1(key STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl2_n1 +PREHOOK: Output: default@outputtbl2_n1 POSTHOOK: query: CREATE TABLE outputTbl2_n1(key STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl2_n1 +POSTHOOK: Output: default@outputtbl2_n1 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl2_n1 SELECT key, count(1) FROM T1_n6 GROUP BY key diff --git ql/src/test/results/clientpositive/groupby_sort_6.q.out ql/src/test/results/clientpositive/groupby_sort_6.q.out index 69306412a7..1aaa676b8c 100644 --- ql/src/test/results/clientpositive/groupby_sort_6.q.out +++ ql/src/test/results/clientpositive/groupby_sort_6.q.out @@ -1,19 +1,19 @@ PREHOOK: query: CREATE TABLE T1_n61(key STRING, val STRING) PARTITIONED BY (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n61 +PREHOOK: Output: default@t1_n61 POSTHOOK: query: CREATE TABLE T1_n61(key STRING, val STRING) PARTITIONED BY (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: 
default@T1_n61 +POSTHOOK: Output: default@t1_n61 PREHOOK: query: CREATE TABLE outputTbl1_n15(key int, cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n15 +PREHOOK: Output: default@outputtbl1_n15 POSTHOOK: query: CREATE TABLE outputTbl1_n15(key int, cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n15 +POSTHOOK: Output: default@outputtbl1_n15 PREHOOK: query: EXPLAIN EXTENDED INSERT OVERWRITE TABLE outputTbl1_n15 SELECT key, count(1) FROM T1_n61 where ds = '1' GROUP BY key diff --git ql/src/test/results/clientpositive/groupby_sort_7.q.out ql/src/test/results/clientpositive/groupby_sort_7.q.out index a0a193d720..df62e7a11d 100644 --- ql/src/test/results/clientpositive/groupby_sort_7.q.out +++ ql/src/test/results/clientpositive/groupby_sort_7.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n104(key STRING, val STRING) PARTITIONED BY (ds CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n104 +PREHOOK: Output: default@t1_n104 POSTHOOK: query: CREATE TABLE T1_n104(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (val) SORTED BY (key, val) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n104 +POSTHOOK: Output: default@t1_n104 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n104 PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### @@ -32,11 +32,11 @@ POSTHOOK: Lineage: t1_n104 PARTITION(ds=1).val SIMPLE [(t1_n104)t1_n104.FieldSch PREHOOK: query: CREATE TABLE outputTbl1_n26(key STRING, val STRING, cnt INT) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@outputTbl1_n26 +PREHOOK: Output: default@outputtbl1_n26 POSTHOOK: query: CREATE TABLE outputTbl1_n26(key STRING, val STRING, cnt INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@outputTbl1_n26 +POSTHOOK: Output: default@outputtbl1_n26 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE outputTbl1_n26 SELECT key, val, count(1) FROM T1_n104 where ds = '1' GROUP BY key, val diff --git ql/src/test/results/clientpositive/groupby_sort_8.q.out ql/src/test/results/clientpositive/groupby_sort_8.q.out index b5f581e6e6..983693204f 100644 --- ql/src/test/results/clientpositive/groupby_sort_8.q.out +++ ql/src/test/results/clientpositive/groupby_sort_8.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n45(key STRING, val STRING) PARTITIONED BY (ds s CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n45 +PREHOOK: Output: default@t1_n45 POSTHOOK: query: CREATE TABLE T1_n45(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n45 +POSTHOOK: Output: default@t1_n45 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n45 PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/groupby_sort_9.q.out ql/src/test/results/clientpositive/groupby_sort_9.q.out index 33e21a3e08..f20d616d4e 100644 --- 
ql/src/test/results/clientpositive/groupby_sort_9.q.out +++ ql/src/test/results/clientpositive/groupby_sort_9.q.out @@ -2,12 +2,12 @@ PREHOOK: query: CREATE TABLE T1_n96(key STRING, val STRING) PARTITIONED BY (ds s CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n96 +PREHOOK: Output: default@t1_n96 POSTHOOK: query: CREATE TABLE T1_n96(key STRING, val STRING) PARTITIONED BY (ds string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n96 +POSTHOOK: Output: default@t1_n96 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bucket_files/000000_0' INTO TABLE T1_n96 PARTITION (ds='1') PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/input1.q.out ql/src/test/results/clientpositive/input1.q.out index 2c41c6e8a1..63e4bc2811 100644 --- ql/src/test/results/clientpositive/input1.q.out +++ ql/src/test/results/clientpositive/input1.q.out @@ -1,11 +1,11 @@ PREHOOK: query: CREATE TABLE TEST1_n6(A INT, B DOUBLE) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@TEST1_n6 +PREHOOK: Output: default@test1_n6 POSTHOOK: query: CREATE TABLE TEST1_n6(A INT, B DOUBLE) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@TEST1_n6 +POSTHOOK: Output: default@test1_n6 PREHOOK: query: EXPLAIN DESCRIBE TEST1_n6 PREHOOK: type: DESCTABLE @@ -21,7 +21,7 @@ STAGE DEPENDENCIES: STAGE PLANS: Stage: Stage-0 Describe Table - table: default.TEST1_n6 + table: default.test1_n6 Stage: Stage-1 Fetch Operator diff --git ql/src/test/results/clientpositive/input3.q.out ql/src/test/results/clientpositive/input3.q.out index 0365ff25ba..90d7a5f885 100644 --- ql/src/test/results/clientpositive/input3.q.out +++ ql/src/test/results/clientpositive/input3.q.out @@ -93,7 +93,7 @@ STAGE PLANS: Stage: Stage-0 Rename Table table name: default.TEST3b - new table name: default.TEST3c + new table name: hive.default.TEST3c PREHOOK: query: ALTER TABLE TEST3b RENAME TO TEST3c PREHOOK: type: ALTERTABLE_RENAME diff --git ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out index 16d6d8f846..39fd474fd7 100644 --- ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out +++ ql/src/test/results/clientpositive/spark/cross_product_check_1.q.out @@ -3,13 +3,13 @@ select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@A_n8 +PREHOOK: Output: default@a_n8 POSTHOOK: query: create table A_n8 as select * from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n8 +POSTHOOK: Output: default@a_n8 POSTHOOK: Lineage: a_n8.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: a_n8.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: create table B_n6 as @@ -18,14 +18,14 @@ limit 10 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@B_n6 +PREHOOK: Output: default@b_n6 POSTHOOK: query: create table B_n6 as select * from src limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT 
POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@B_n6 +POSTHOOK: Output: default@b_n6 POSTHOOK: Lineage: b_n6.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: b_n6.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product diff --git ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out index adebe73524..dc13bc78a4 100644 --- ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out +++ ql/src/test/results/clientpositive/spark/cross_product_check_2.q.out @@ -3,13 +3,13 @@ select * from src PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@A_n2 +PREHOOK: Output: default@a_n2 POSTHOOK: query: create table A_n2 as select * from src POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@A_n2 +POSTHOOK: Output: default@a_n2 POSTHOOK: Lineage: a_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: a_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: create table B_n2 as @@ -18,14 +18,14 @@ limit 10 PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@B_n2 +PREHOOK: Output: default@b_n2 POSTHOOK: query: create table B_n2 as select * from src order by key limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@B_n2 +POSTHOOK: Output: default@b_n2 POSTHOOK: Lineage: b_n2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: b_n2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-1:MAPRED' is a cross product diff --git ql/src/test/results/clientpositive/spark/ctas.q.out ql/src/test/results/clientpositive/spark/ctas.q.out index d6738a2b8f..eae9abcdb9 100644 --- ql/src/test/results/clientpositive/spark/ctas.q.out +++ ql/src/test/results/clientpositive/spark/ctas.q.out @@ -1,11 +1,11 @@ PREHOOK: query: create table nzhang_Tmp(a int, b string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_Tmp +PREHOOK: Output: default@nzhang_tmp POSTHOOK: query: create table nzhang_Tmp(a int, b string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_Tmp +POSTHOOK: Output: default@nzhang_tmp PREHOOK: query: select * from nzhang_Tmp PREHOOK: type: QUERY PREHOOK: Input: default@nzhang_tmp @@ -18,12 +18,12 @@ PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from sr PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_CTAS1 +PREHOOK: Output: default@nzhang_ctas1 POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_CTAS1 +POSTHOOK: Output: default@nzhang_ctas1 STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -87,7 +87,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_CTAS1 + name: default.nzhang_ctas1 Stage: Stage-0 Move Operator @@ -98,10 +98,10 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: k string, value string - name: default.nzhang_CTAS1 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas1 Stage: Stage-2 Stats Work @@ -111,12 +111,12 @@ PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort b PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@src PREHOOK: Output: database:default -PREHOOK: Output: default@nzhang_CTAS1 +PREHOOK: Output: default@nzhang_ctas1 POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10 POSTHOOK: type: CREATETABLE_AS_SELECT POSTHOOK: Input: default@src POSTHOOK: Output: database:default -POSTHOOK: Output: default@nzhang_CTAS1 +POSTHOOK: Output: default@nzhang_ctas1 POSTHOOK: Lineage: nzhang_ctas1.k SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_ctas1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from nzhang_CTAS1 @@ -256,10 +256,10 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: key string, value string - name: default.nzhang_ctas2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas2 Stage: Stage-2 Stats Work @@ -414,10 +414,10 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: half_key double, conb string - name: default.nzhang_ctas3 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.RCFileOutputFormat serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: hive.default.nzhang_ctas3 Stage: Stage-2 Stats Work @@ -636,11 +636,11 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: key string, value string - name: default.nzhang_ctas4 field delimiter: , input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas4 Stage: Stage-2 Stats Work @@ -796,13 +796,13 @@ STAGE PLANS: Stage: Stage-3 Create Table columns: key string, value string - name: default.nzhang_ctas5 field delimiter: , input format: org.apache.hadoop.mapred.TextInputFormat line delimiter: output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: hive.default.nzhang_ctas5 Stage: Stage-2 Stats Work diff --git ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out index eab45b9315..72dd105f81 100644 --- ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out +++ ql/src/test/results/clientpositive/spark/filter_join_breaktask2.q.out @@ -2,36 +2,36 @@ PREHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string, partitioned by (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T1_n85 +PREHOOK: Output: default@t1_n85 POSTHOOK: query: create table T1_n85(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T1_n85 +POSTHOOK: Output: default@t1_n85 PREHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T2_n53 +PREHOOK: Output: default@t2_n53 POSTHOOK: query: create table T2_n53(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T2_n53 +POSTHOOK: Output: default@t2_n53 PREHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T3_n18 +PREHOOK: Output: default@t3_n18 POSTHOOK: query: create table T3_n18 (c0 bigint, c1 bigint, c2 int) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T3_n18 +POSTHOOK: Output: default@t3_n18 PREHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 
string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@T4_n8 +PREHOOK: Output: default@t4_n8 POSTHOOK: query: create table T4_n8 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@T4_n8 +POSTHOOK: Output: default@t4_n8 PREHOOK: query: insert overwrite table T1_n85 partition (ds='2010-04-17') select '5', '1', '1', '1', 0, 0,4 from src tablesample (1 rows) PREHOOK: type: QUERY PREHOOK: Input: default@src diff --git ql/src/test/results/clientpositive/spark/groupby10.q.out ql/src/test/results/clientpositive/spark/groupby10.q.out index eb1d93d9ef..f01845d9aa 100644 --- ql/src/test/results/clientpositive/spark/groupby10.q.out +++ ql/src/test/results/clientpositive/spark/groupby10.q.out @@ -17,11 +17,11 @@ POSTHOOK: Output: default@dest2 PREHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@INPUT +PREHOOK: Output: default@input POSTHOOK: query: CREATE TABLE INPUT(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@INPUT +POSTHOOK: Output: default@input PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv5.txt' INTO TABLE INPUT PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/spark/groupby7.q.out ql/src/test/results/clientpositive/spark/groupby7.q.out index 3dd3a5e588..67b6ed1877 100644 --- ql/src/test/results/clientpositive/spark/groupby7.q.out +++ ql/src/test/results/clientpositive/spark/groupby7.q.out @@ -1,19 +1,19 @@ PREHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE PREHOOK: 
type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n132 +PREHOOK: Output: default@dest1_n132 POSTHOOK: query: CREATE TABLE DEST1_n132(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n132 +POSTHOOK: Output: default@dest1_n132 PREHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST2_n34 +PREHOOK: Output: default@dest2_n34 POSTHOOK: query: CREATE TABLE DEST2_n34(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST2_n34 +POSTHOOK: Output: default@dest2_n34 PREHOOK: query: FROM SRC INSERT OVERWRITE TABLE DEST1_n132 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key INSERT OVERWRITE TABLE DEST2_n34 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key diff --git ql/src/test/results/clientpositive/spark/groupby7_map.q.out ql/src/test/results/clientpositive/spark/groupby7_map.q.out index ea85a33850..ba39928ceb 100644 --- ql/src/test/results/clientpositive/spark/groupby7_map.q.out +++ ql/src/test/results/clientpositive/spark/groupby7_map.q.out @@ -1,19 +1,19 @@ PREHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n82 +PREHOOK: Output: default@dest1_n82 POSTHOOK: query: CREATE TABLE DEST1_n82(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n82 +POSTHOOK: Output: default@dest1_n82 PREHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST2_n19 +PREHOOK: Output: default@dest2_n19 POSTHOOK: query: CREATE TABLE DEST2_n19(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST2_n19 +POSTHOOK: Output: default@dest2_n19 PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n82 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key diff --git ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out index d3a96c1ed9..1f57b710cf 100644 --- ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out +++ ql/src/test/results/clientpositive/spark/groupby7_map_multi_single_reducer.q.out @@ -1,19 +1,19 @@ PREHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST1_n15 +PREHOOK: Output: default@dest1_n15 POSTHOOK: query: CREATE TABLE DEST1_n15(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST1_n15 +POSTHOOK: Output: default@dest1_n15 PREHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@DEST2_n3 +PREHOOK: Output: default@dest2_n3 POSTHOOK: query: CREATE TABLE DEST2_n3(key INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@DEST2_n3 +POSTHOOK: Output: default@dest2_n3 
PREHOOK: query: EXPLAIN FROM SRC INSERT OVERWRITE TABLE DEST1_n15 SELECT SRC.key, sum(SUBSTR(SRC.value,5)) GROUP BY SRC.key diff --git ql/src/test/results/clientpositive/spark/input16_cc.q.out ql/src/test/results/clientpositive/spark/input16_cc.q.out index 195e82b301..0cf1e418a2 100644 --- ql/src/test/results/clientpositive/spark/input16_cc.q.out +++ ql/src/test/results/clientpositive/spark/input16_cc.q.out @@ -5,11 +5,11 @@ POSTHOOK: type: DROPTABLE PREHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default -PREHOOK: Output: default@INPUT16_CC +PREHOOK: Output: default@input16_cc POSTHOOK: query: CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default -POSTHOOK: Output: default@INPUT16_CC +POSTHOOK: Output: default@input16_cc PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC PREHOOK: type: LOAD #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out index 981fa86d25..7bd9526aa7 100644 --- ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out +++ ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out @@ -174,7 +174,7 @@ POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: newdb@tab_n2 POSTHOOK: Output: newdb@tab_n2 Stage-0 - Rename Table{"table name:":"newDB.tab_n2","new table name:":"newDB.newName"} + Rename Table{"table name:":"newDB.tab_n2","new table name:":"hive.newDB.newName"} PREHOOK: query: drop table tab_n2 PREHOOK: type: DROPTABLE diff --git ql/src/test/results/clientpositive/tez/explainuser_3.q.out ql/src/test/results/clientpositive/tez/explainuser_3.q.out index d4374f02e3..96a2f60181 100644 --- ql/src/test/results/clientpositive/tez/explainuser_3.q.out +++ ql/src/test/results/clientpositive/tez/explainuser_3.q.out @@ -178,7 +178,7 @@ POSTHOOK: type: ALTERTABLE_RENAME POSTHOOK: Input: newdb@tab_n1 POSTHOOK: Output: newdb@tab_n1 Stage-0 - Rename Table{"table name:":"newDB.tab_n1","new table name:":"newDB.newName"} + Rename Table{"table name:":"newDB.tab_n1","new table name:":"hive.newDB.newName"} PREHOOK: query: explain drop table tab_n1 PREHOOK: type: DROPTABLE diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 205c867db1..60018a63d5 100644 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -57,6 +57,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.metastore.api.*; @@ 
-2107,6 +2108,10 @@ public Table getTable(String catName, String dbName, String tableName, String va return deepCopy(FilterUtils.filterTableIfEnabled(isClientFilterEnabled, filterHook, t)); } + @Override public Table getTable(TableName tableName) throws MetaException, TException, NoSuchObjectException { + return getTable(tableName.getCat(), tableName.getDb(), tableName.getTable()); + } + @Override public List<Table> getTableObjectsByName(String dbName, List<String> tableNames) throws TException { diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index b58b1e4a07..ae8c123c3c 100644 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.common.classification.RetrySemantics; @@ -681,6 +682,22 @@ Table getTable(String dbName, String tableName, boolean getColumnStats, String e */ Table getTable(String catName, String dbName, String tableName) throws MetaException, TException; + /** + * Get a table object, using the catalog, database and table encoded in the given TableName. + * + * @param tableName + * The {@link org.apache.hadoop.hive.common.TableName} to fetch. + * @return An object representing the table. + * @throws MetaException + * Could not fetch the table + * @throws TException + * A thrift communication error occurred + * @throws NoSuchObjectException + * In case the table wasn't found. + */ + Table getTable(TableName tableName) throws MetaException, + TException, NoSuchObjectException; + /** * Get a table object. * @param catName catalog the table is in.
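Note: the new overload simply unpacks the TableName triple and delegates to the existing three-argument getTable. A minimal caller sketch, assuming a connected IMetaStoreClient named msc; the table name is made up:

    // Hypothetical usage of the new getTable(TableName) overload.
    TableName name = TableName.fromString("web_logs",
        Warehouse.DEFAULT_CATALOG_NAME, Warehouse.DEFAULT_DATABASE_NAME);
    Table t = msc.getTable(name); // equivalent to msc.getTable("hive", "default", "web_logs")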
diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java index 75a9368d8c..83d8cfa7a1 100755 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -305,7 +305,7 @@ public static String getQualifiedName(Partition partition) { * @param table table object * @return fully qualified name. */ - public static String getCatalogQualifiedTableName(Table table) { + public static String getCatalogQualifiedTableName(Table table) { // TODO: deprecate/remove return TableName.getQualified(table.getCatName(), table.getDbName(), table.getTableName()); } diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java index fab83b6501..838c2d1eb8 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Msck.java @@ -110,15 +110,15 @@ public int repair(MsckInfo msckInfo) { int ret = 0; long partitionExpirySeconds = msckInfo.getPartitionExpirySeconds(); try { - Table table = getMsc().getTable(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName()); + Table table = getMsc().getTable(msckInfo.getTableName()); qualifiedTableName = Warehouse.getCatalogQualifiedTableName(table); HiveMetaStoreChecker checker = new HiveMetaStoreChecker(getMsc(), getConf(), partitionExpirySeconds); // checkMetastore call will fill in result with partitions that are present in filesystem // and missing in metastore - accessed through getPartitionsNotInMs // And partitions that are not present in filesystem and metadata exists in metastore - // accessed through getPartitionNotOnFS - checker.checkMetastore(msckInfo.getCatalogName(), msckInfo.getDbName(), msckInfo.getTableName(), - msckInfo.getPartSpecs(), result); + checker.checkMetastore(msckInfo.getTableName().getCat(), msckInfo.getTableName().getDb(), + msckInfo.getTableName().getTable(), msckInfo.getPartSpecs(), result); Set<CheckResult.PartitionResult> partsNotInMs = result.getPartitionsNotInMs(); Set<CheckResult.PartitionResult> partsNotInFs = result.getPartitionsNotOnFs(); Set<CheckResult.PartitionResult> expiredPartitions = result.getExpiredPartitions(); @@ -138,7 +138,8 @@ public int repair(MsckInfo msckInfo) { MetaStoreServerUtils.isTransactionalTable(table.getParameters())) { // Running MSCK from beeline/cli will make DDL task acquire X lock when repair is enabled, since we are directly // invoking msck.repair() without SQL statement, we need to do the same and acquire X lock (repair is default) - LockRequest lockRequest = createLockRequest(msckInfo.getDbName(), msckInfo.getTableName()); + LockRequest lockRequest = + createLockRequest(msckInfo.getTableName().getDb(), msckInfo.getTableName().getTable()); txnId = lockRequest.getTxnid(); try { LockResponse res = getMsc().lock(lockRequest);
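Note: with the three loose name strings collapsed into a single TableName, a repair call now looks roughly as follows. This is a hedged sketch only: conf is an assumed metastore Configuration, the table name is a placeholder, and the null partSpecs/resFile and flag values are illustrative, not a recommended invocation. The MsckInfo constructor used here is the one changed in the next hunk.

    TableName tn = TableName.fromString("web_logs", "default"); // resolves to default.web_logs
    MsckInfo info = new MsckInfo(tn, null, null, true, true, false, -1L);
    Msck msck = new Msck(false, false);
    msck.init(conf);
    int ret = msck.repair(info); // catalog, db and table are all read from tn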
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java index 25d0c648ae..9c28133c93 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MsckInfo.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.metastore; +import org.apache.hadoop.hive.common.TableName; + import java.util.List; import java.util.Map; @@ -25,9 +27,7 @@ */ public class MsckInfo { - private final String catalogName; - private final String dbName; - private final String tableName; + private final TableName tableName; private final List<Map<String, String>> partSpecs; private final String resFile; private final boolean repairPartitions; @@ -35,11 +35,9 @@ private final boolean dropPartitions; private final long partitionExpirySeconds; - public MsckInfo(String catalogName, String dbName, String tableName, List<Map<String, String>> partSpecs, + public MsckInfo(TableName tableName, List<Map<String, String>> partSpecs, String resFile, boolean repairPartitions, boolean addPartitions, boolean dropPartitions, long partitionExpirySeconds) { - this.catalogName = catalogName; - this.dbName = dbName; this.tableName = tableName; this.partSpecs = partSpecs; this.resFile = resFile; @@ -49,15 +47,7 @@ public MsckInfo(String catalogName, String dbName, String tableName, List getTableObjectsByName(String catName, String dbName, List<String> tableNames) throws MetaException, diff --git storage-api/src/java/org/apache/hadoop/hive/common/TableName.java storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index a0f47c2ebf..aae55411ab 100644 --- storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -36,8 +36,11 @@ private final String cat; private final String db; private final String table; + private final boolean catIsBlank; + private final boolean dbIsBlank; /** + * Creates a TableName object, after lower-casing all parts. * * @param catName catalog name. Cannot be null. If you do not know it you can get it from * SessionState.getCurrentCatalog() if you want to use the catalog from the current * @param tableName table name, cannot be null */ public TableName(final String catName, final String dbName, final String tableName) { - this.cat = catName; - this.db = dbName; - this.table = tableName; + if (tableName == null || tableName.trim().isEmpty()) { + throw new IllegalArgumentException(String.join("", "Table value was blank. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.dbIsBlank = dbName == null || dbName.trim().isEmpty(); + this.catIsBlank = catName == null || catName.trim().isEmpty(); + if (dbIsBlank && !catIsBlank) { + throw new IllegalArgumentException( + String.join("", "Invalid values: database was blank, while catalog wasn't. ", ILL_ARG_EXCEPTION_MSG)); + } + + this.cat = this.catIsBlank ? catName : catName.toLowerCase(); + this.db = this.dbIsBlank ? dbName : dbName.toLowerCase(); + this.table = tableName.toLowerCase(); + } + + /** + * Build a TableName from a string of the form [database.]table. + * @param name name in string form, not null, of the form [database.]table. + * @param defaultDatabase default database to use if database is not in the name. If you do + * not know it you can get it from SessionState.getCurrentDatabase() or + * use Warehouse.DEFAULT_DATABASE_NAME. + * @return TableName + * @throws IllegalArgumentException if a null name is given + */ + public static TableName fromString(final String name, final String defaultDatabase) + throws IllegalArgumentException { + return fromString(name, null, defaultDatabase); } /** * Build a TableName from a string of the form [[catalog.]database.]table. - * @param name name in string form, not null + * @param name name in string form, not null, of the form [[catalog.]database.]table. * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not * know it you can get it from SessionState.getCurrentCatalog() if you * want to use the catalog from the current session, or from @@ -102,9 +130,9 @@ public String getTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog.
* Fair warning, that if the db is null, this will return null.tableName - * @deprecated use {@link #getNotEmptyDbTable()} instead. + * @deprecated use {@link #toString()} instead. */ - // to be @Deprecated + // todo: remove, refactor public String getDbTable() { return db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } @@ -114,13 +142,14 @@ public String getDbTable() { */ public String getEscapedNotEmptyDbTable() { return - db == null || db.trim().isEmpty() ? + dbIsBlank ? "`" + table + "`" : "`" + db + "`" + DatabaseName.CAT_DB_TABLE_SEPARATOR + "`" + table + "`"; } /** * Get the name in db.table format, if db is not empty, otherwise pass only the table name. */ + @Deprecated public String getNotEmptyDbTable() { return db == null || db.trim().isEmpty() ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; } @@ -128,10 +157,12 @@ public String getNotEmptyDbTable() { /** * Get the name in db.table format, for use with stuff not yet converted to use the catalog. */ + // todo: this can be quite unsafe public static String getDbTable(String dbName, String tableName) { return dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } + // todo: this can be quite unsafe public static String getQualified(String catName, String dbName, String tableName) { return catName + DatabaseName.CAT_DB_TABLE_SEPARATOR + dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; } @@ -154,6 +185,10 @@ public static String getQualified(String catName, String dbName, String tableNam @Override public String toString() { - return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + if (catIsBlank) { + return dbIsBlank ? table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } else { + return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } } } diff --git storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java index f19c7358c9..872fb900f8 100644 --- storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java +++ storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java @@ -21,14 +21,51 @@ import org.junit.Test; public class TestTableName { + @Test - public void fullName() { - TableName name = new TableName("cat", "db", "t"); + public void testFullName() { + TableName name = new TableName("CaT", "dB", "TbL"); Assert.assertEquals("cat", name.getCat()); Assert.assertEquals("db", name.getDb()); - Assert.assertEquals("t", name.getTable()); - Assert.assertEquals("cat.db.t", name.toString()); - Assert.assertEquals("db.t", name.getDbTable()); + Assert.assertEquals("tbl", name.getTable()); + Assert.assertEquals("cat.db.tbl", name.toString()); + Assert.assertEquals("db.tbl", name.getDbTable()); + } + + @Test + public void testPartialName() { + TableName name = new TableName(null, "db", "t"); + Assert.assertEquals("db.t", name.toString()); + + name = new TableName(null, null, "t"); + Assert.assertEquals("t", name.toString()); + } + + @Test + public void testIllegalNames() { + try { + new TableName("cat", null, "t"); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "", "t"); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "db", null); + Assert.fail(); + } catch (IllegalArgumentException e) { + } + + try { + new TableName("cat", "db", ""); + Assert.fail(); + } catch (IllegalArgumentException e) { + } } @Test @@ -55,16 +92,4 @@ 
public void fromString() { Assert.assertTrue(true); } } - - @Test - public void testNotEmptyDbTable() { - TableName name = new TableName("cat", "db", "t"); - Assert.assertEquals("db.t", name.getNotEmptyDbTable()); - - name = new TableName("cat", null, "t"); - Assert.assertEquals("t", name.getNotEmptyDbTable()); - - name = new TableName("cat", "", "t"); - Assert.assertEquals("t", name.getNotEmptyDbTable()); - } }
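Note: taken together, the TableName changes normalize case at construction time and make toString() elide blank parts, so it can serve as a stable db.table-style key. A small behavioral sketch, consistent with the updated unit tests above; all values are examples:

    new TableName("CaT", "dB", "TbL").toString(); // "cat.db.tbl" -- every part lowercased
    new TableName(null, "Db", "T").toString();    // "db.t"       -- blank catalog omitted
    new TableName(null, null, "T").toString();    // "t"          -- blank catalog and db omitted
    new TableName("cat", null, "t");              // IllegalArgumentException: catalog given, database blank
    new TableName("cat", "db", "");               // IllegalArgumentException: blank table name
    TableName.fromString("Db.T", "default");      // new [database.]table overload -> "db.t"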