diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java index ff0f210..172ff01 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java @@ -86,7 +86,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, "Operation not supported. Create table as " + "Select is not a valid operation."); - case HiveParser.TOK_TABLEBUCKETS: + case HiveParser.TOK_ALTERTABLE_BUCKETS: break; case HiveParser.TOK_LIKETABLE: diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java index 4d338b5..6c54c05 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java @@ -71,7 +71,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) hook = new CreateDatabaseHook(); return hook.preAnalyze(context, ast); - case HiveParser.TOK_ALTERTABLE_PARTITION: + case HiveParser.TOK_ALTERTABLE: if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { return ast; } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) { @@ -163,7 +163,6 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, case HiveParser.TOK_CREATETABLE: case HiveParser.TOK_CREATEDATABASE: - case HiveParser.TOK_ALTERTABLE_PARTITION: // HCat will allow these operations to be performed. 
// Database DDL @@ -178,12 +177,20 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, case HiveParser.TOK_CREATEINDEX: case HiveParser.TOK_DROPINDEX: case HiveParser.TOK_SHOWINDEXES: + break; // View DDL //case HiveParser.TOK_ALTERVIEW_ADDPARTS: - case HiveParser.TOK_ALTERVIEW_DROPPARTS: - case HiveParser.TOK_ALTERVIEW_PROPERTIES: - case HiveParser.TOK_ALTERVIEW_RENAME: + case HiveParser.TOK_ALTERVIEW: + switch (ast.getChild(1).getType()) { + case HiveParser.TOK_ALTERVIEW_ADDPARTS: + case HiveParser.TOK_ALTERVIEW_DROPPARTS: + case HiveParser.TOK_ALTERVIEW_RENAME: + case HiveParser.TOK_ALTERVIEW_PROPERTIES: + case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES: + } + break; + case HiveParser.TOK_CREATEVIEW: case HiveParser.TOK_DROPVIEW: @@ -205,20 +212,39 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, case HiveParser.TOK_DESCFUNCTION: case HiveParser.TOK_SHOWFUNCTIONS: case HiveParser.TOK_EXPLAIN: + break; // Table DDL - case HiveParser.TOK_ALTERTABLE_ADDPARTS: - case HiveParser.TOK_ALTERTABLE_ADDCOLS: - case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION: - case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: - case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: - case HiveParser.TOK_ALTERTABLE_DROPPARTS: - case HiveParser.TOK_ALTERTABLE_PROPERTIES: - case HiveParser.TOK_ALTERTABLE_RENAME: - case HiveParser.TOK_ALTERTABLE_RENAMECOL: - case HiveParser.TOK_ALTERTABLE_REPLACECOLS: - case HiveParser.TOK_ALTERTABLE_SERIALIZER: - case HiveParser.TOK_ALTERTABLE_TOUCH: + case HiveParser.TOK_ALTERTABLE: + switch (ast.getChild(1).getType()) { + case HiveParser.TOK_ALTERTABLE_ADDPARTS: + case HiveParser.TOK_ALTERTABLE_ADDCOLS: + case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION: + case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: + case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: + case HiveParser.TOK_ALTERTABLE_DROPPARTS: + case HiveParser.TOK_ALTERTABLE_PROPERTIES: + case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES: + case HiveParser.TOK_ALTERTABLE_RENAME: + case HiveParser.TOK_ALTERTABLE_RENAMECOL: + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: + case HiveParser.TOK_ALTERTABLE_SERIALIZER: + case HiveParser.TOK_ALTERTABLE_TOUCH: + case HiveParser.TOK_ALTERTABLE_ARCHIVE: + case HiveParser.TOK_ALTERTABLE_UNARCHIVE: + case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION: + case HiveParser.TOK_ALTERTABLE_SKEWED: + case HiveParser.TOK_ALTERTABLE_FILEFORMAT: + case HiveParser.TOK_ALTERTABLE_PROTECTMODE: + case HiveParser.TOK_ALTERTABLE_LOCATION: + case HiveParser.TOK_ALTERTABLE_MERGEFILES: + case HiveParser.TOK_ALTERTABLE_RENAMEPART: + case HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION: + case HiveParser.TOK_ALTERTABLE_BUCKETS: + case HiveParser.TOK_ALTERTABLE_COMPACT: + } + break; + case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_SHOW_TABLESTATUS: diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java index 1e25ed3..606cb3a 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java @@ -156,7 +156,7 @@ public void testDatabaseOperations() throws MetaException, CommandNeedRetryExcep public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException { hcatDriver.run("drop table " + TBL_NAME); - hcatDriver.run("create table junit_sem_analysis (a int) stored as 
RCFILE"); + hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE"); Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME); List cols = tbl.getSd().getCols(); assertEquals(1, cols.size()); diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java index ae89182..0e8807e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java @@ -66,7 +66,7 @@ public void run(SessionState sess, Set inputs, case TABLE: { Table t = db.getTable(re.getTable().getTableName()); t.setLastAccessTime(lastAccessTime); - db.alterTable(t.getTableName(), t); + db.alterTable(t.getDbName() + "." + t.getTableName(), t); break; } case PARTITION: { @@ -76,7 +76,7 @@ public void run(SessionState sess, Set inputs, p.setLastAccessTime(lastAccessTime); db.alterPartition(t.getTableName(), p); t.setLastAccessTime(lastAccessTime); - db.alterTable(t.getTableName(), t); + db.alterTable(t.getDbName() + "." + t.getTableName(), t); break; } default: diff --git ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java index 1e01001..eeb343b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java @@ -77,7 +77,7 @@ protected int execute(DriverContext driverContext) { FileSystem fs = url.getFileSystem(conf); FileStatus fstat = fs.getFileStatus(url); tbl.getParameters().put(HiveIndex.INDEX_TABLE_CREATETIME, Long.toString(fstat.getModificationTime())); - db.alterTable(tbl.getTableName(), tbl); + db.alterTable(tbl.getDbName() + "." + tbl.getTableName(), tbl); } } catch (Exception e) { e.printStackTrace(); diff --git ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java index 27e251c..b076933 100644 --- ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java @@ -233,7 +233,8 @@ public void analyzeIndexDefinition(Table baseTable, Index index, StringBuilder command= new StringBuilder(); LinkedHashMap partSpec = indexTblPartDesc.getPartSpec(); - command.append("INSERT OVERWRITE TABLE " + HiveUtils.unparseIdentifier(indexTableName )); + command.append("INSERT OVERWRITE TABLE " + + HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(indexTableName )); if (partitioned && indexTblPartDesc != null) { command.append(" PARTITION ( "); List ret = getPartKVPairStringArray(partSpec); @@ -257,7 +258,8 @@ public void analyzeIndexDefinition(Table baseTable, Index index, command.append("EWAH_BITMAP("); command.append(VirtualColumn.ROWOFFSET.getName()); command.append(")"); - command.append(" FROM " + HiveUtils.unparseIdentifier(baseTableName) ); + command.append(" FROM " + + HiveUtils.unparseIdentifier(dbName) + "." 
+ HiveUtils.unparseIdentifier(baseTableName)); LinkedHashMap basePartSpec = baseTablePartDesc.getPartSpec(); if(basePartSpec != null) { command.append(" WHERE "); diff --git ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java index e7434a3..0ca5d22 100644 --- ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java @@ -103,7 +103,8 @@ public void analyzeIndexDefinition(Table baseTable, Index index, StringBuilder command= new StringBuilder(); LinkedHashMap partSpec = indexTblPartDesc.getPartSpec(); - command.append("INSERT OVERWRITE TABLE " + HiveUtils.unparseIdentifier(indexTableName )); + command.append("INSERT OVERWRITE TABLE " + + HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(indexTableName )); if (partitioned && indexTblPartDesc != null) { command.append(" PARTITION ( "); List ret = getPartKVPairStringArray(partSpec); @@ -126,7 +127,8 @@ public void analyzeIndexDefinition(Table baseTable, Index index, command.append(" collect_set ("); command.append(VirtualColumn.BLOCKOFFSET.getName()); command.append(") "); - command.append(" FROM " + HiveUtils.unparseIdentifier(baseTableName) ); + command.append(" FROM " + + HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(baseTableName)); LinkedHashMap basePartSpec = baseTablePartDesc.getPartSpec(); if(basePartSpec != null) { command.append(" WHERE "); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 60d490f..424ca50 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -646,6 +646,20 @@ public tableSpec(Hive db, HiveConf conf, ASTNode ast) this(db, conf, ast, true, false); } + public tableSpec(Hive db, HiveConf conf, String tableName, Map partSpec) + throws HiveException { + this.tableName = tableName; + this.partSpec = partSpec; + this.tableHandle = db.getTable(tableName); + if (partSpec != null) { + this.specType = SpecType.STATIC_PARTITION; + this.partHandle = db.getPartition(tableHandle, partSpec, false); + this.partitions = Arrays.asList(partHandle); + } else { + this.specType = SpecType.TABLE_ONLY; + } + } + public tableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartitionsSpec, boolean allowPartialPartitionsSpec) throws SemanticException { assert (ast.getToken().getType() == HiveParser.TOK_TAB @@ -1188,15 +1202,16 @@ protected Database getDatabase(String dbName) throws SemanticException { } protected Database getDatabase(String dbName, boolean throwException) throws SemanticException { + Database database; try { - Database database = db.getDatabase(dbName); - if (database == null && throwException) { - throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName)); - } - return database; - } catch (HiveException e) { + database = db.getDatabase(dbName); + } catch (Exception e) { throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName), e); } + if (database == null && throwException) { + throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName)); + } + return database; } protected Table getTable(String[] qualified) throws SemanticException { @@ -1213,43 +1228,46 @@ protected Table getTable(String tblName, boolean throwException) 
throws Semantic protected Table getTable(String database, String tblName, boolean throwException) throws SemanticException { + Table tab; try { - Table tab = database == null ? db.getTable(tblName, false) + tab = database == null ? db.getTable(tblName, false) : db.getTable(database, tblName, false); - if (tab == null && throwException) { - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName)); - } - return tab; - } catch (HiveException e) { + } catch (Exception e) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e); } + if (tab == null && throwException) { + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName)); + } + return tab; } protected Partition getPartition(Table table, Map partSpec, boolean throwException) throws SemanticException { + Partition partition; try { - Partition partition = db.getPartition(table, partSpec, false); - if (partition == null && throwException) { - throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); - } - return partition; - } catch (HiveException e) { + partition = db.getPartition(table, partSpec, false); + } catch (Exception e) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e); } + if (partition == null && throwException) { + throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); + } + return partition; } protected List getPartitions(Table table, Map partSpec, boolean throwException) throws SemanticException { + List partitions; try { - List partitions = partSpec == null ? db.getPartitions(table) : + partitions = partSpec == null ? db.getPartitions(table) : db.getPartitions(table, partSpec); - if (partitions.isEmpty() && throwException) { - throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); - } - return partitions; - } catch (HiveException e) { + } catch (Exception e) { throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e); } + if (partitions.isEmpty() && throwException) { + throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec)); + } + return partitions; } protected String toMessage(ErrorMsg message, Object detail) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index f31a409..05cde3e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -249,39 +249,67 @@ public DDLSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException { } @Override - public void analyzeInternal(ASTNode ast) throws SemanticException { - - switch (ast.getToken().getType()) { - case HiveParser.TOK_ALTERTABLE_PARTITION: { - ASTNode tablePart = (ASTNode) ast.getChild(0); - TablePartition tblPart = new TablePartition(tablePart); - String tableName = tblPart.tableName; - HashMap partSpec = tblPart.partSpec; - ast = (ASTNode) ast.getChild(1); - if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { + public void analyzeInternal(ASTNode input) throws SemanticException { + + ASTNode ast = input; + switch (ast.getType()) { + case HiveParser.TOK_ALTERTABLE: { + ast = (ASTNode) input.getChild(1); + String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0)); + String tableName = getDotName(qualified); + HashMap partSpec = DDLSemanticAnalyzer.getPartSpec((ASTNode) input.getChild(2)); + if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) { + 
analyzeAlterTableRename(qualified, ast, false); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { + analyzeAlterTableTouch(qualified, ast); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { + analyzeAlterTableArchive(qualified, ast, false); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { + analyzeAlterTableArchive(qualified, ast, true); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { + analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.ADDCOLS); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { + analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.REPLACECOLS); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) { + analyzeAlterTableRenameCol(qualified, ast); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { + analyzeAlterTableAddParts(qualified, ast, false); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) { + analyzeAlterTableDropParts(qualified, ast, false); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) { + analyzeAlterTablePartColType(qualified, ast); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { + analyzeAlterTableProps(qualified, ast, false, false); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPROPERTIES) { + analyzeAlterTableProps(qualified, ast, false, true); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_SKEWED) { + analyzeAltertableSkewedby(qualified, ast); + } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) { + analyzeExchangePartition(qualified, ast); + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { analyzeAlterTableFileFormat(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROTECTMODE) { analyzeAlterTableProtectMode(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) { analyzeAlterTableLocation(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) { - analyzeAlterTablePartMergeFiles(tablePart, ast, tableName, partSpec); + analyzeAlterTablePartMergeFiles(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) { analyzeAlterTableSerde(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) { analyzeAlterTableSerdeProps(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { analyzeAlterTableRenamePart(ast, tableName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION) { + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) { analyzeAlterTableSkewedLocation(ast, tableName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_TABLEBUCKETS) { + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_BUCKETS) { analyzeAlterTableBucketNum(ast, tableName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { analyzeAlterTableClusterSort(ast, tableName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_COMPACT) { + } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_COMPACT) { analyzeAlterTableCompact(ast, tableName, partSpec); } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS){ - analyzeAlterTableUpdateStats(ast,tblPart); 
+ analyzeAlterTableUpdateStats(ast, tableName, partSpec); } break; } @@ -360,66 +388,22 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { case HiveParser.TOK_DROPVIEW: analyzeDropTable(ast, true); break; - case HiveParser.TOK_ALTERVIEW_PROPERTIES: - analyzeAlterTableProps(ast, true, false); - break; - case HiveParser.TOK_DROPVIEW_PROPERTIES: - analyzeAlterTableProps(ast, true, true); - break; - case HiveParser.TOK_ALTERVIEW_ADDPARTS: - // for ALTER VIEW ADD PARTITION, we wrapped the ADD to discriminate - // view from table; unwrap it now - analyzeAlterTableAddParts((ASTNode) ast.getChild(0), true); - break; - case HiveParser.TOK_ALTERVIEW_DROPPARTS: - // for ALTER VIEW DROP PARTITION, we wrapped the DROP to discriminate - // view from table; unwrap it now - analyzeAlterTableDropParts((ASTNode) ast.getChild(0), true); - break; - case HiveParser.TOK_ALTERVIEW_RENAME: - // for ALTER VIEW RENAME, we wrapped the RENAME to discriminate - // view from table; unwrap it now - analyzeAlterTableRename(((ASTNode) ast.getChild(0)), true); - break; - case HiveParser.TOK_ALTERTABLE_RENAME: - analyzeAlterTableRename(ast, false); - break; - case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS: - analyzeAlterTableUpdateStats(ast, null); - break; - case HiveParser.TOK_ALTERTABLE_TOUCH: - analyzeAlterTableTouch(ast); - break; - case HiveParser.TOK_ALTERTABLE_ARCHIVE: - analyzeAlterTableArchive(ast, false); - break; - case HiveParser.TOK_ALTERTABLE_UNARCHIVE: - analyzeAlterTableArchive(ast, true); - break; - case HiveParser.TOK_ALTERTABLE_ADDCOLS: - analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS); - break; - case HiveParser.TOK_ALTERTABLE_REPLACECOLS: - analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS); - break; - case HiveParser.TOK_ALTERTABLE_RENAMECOL: - analyzeAlterTableRenameCol(ast); - break; - case HiveParser.TOK_ALTERTABLE_ADDPARTS: - analyzeAlterTableAddParts(ast, false); - break; - case HiveParser.TOK_ALTERTABLE_DROPPARTS: - analyzeAlterTableDropParts(ast, false); - break; - case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE: - analyzeAlterTablePartColType(ast); - break; - case HiveParser.TOK_ALTERTABLE_PROPERTIES: - analyzeAlterTableProps(ast, false, false); - break; - case HiveParser.TOK_DROPTABLE_PROPERTIES: - analyzeAlterTableProps(ast, false, true); + case HiveParser.TOK_ALTERVIEW: { + String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + ast = (ASTNode) ast.getChild(1); + if (ast.getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) { + analyzeAlterTableProps(qualified, ast, true, false); + } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPROPERTIES) { + analyzeAlterTableProps(qualified, ast, true, true); + } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_ADDPARTS) { + analyzeAlterTableAddParts(qualified, ast, true); + } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPARTS) { + analyzeAlterTableDropParts(qualified, ast, true); + } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_RENAME) { + analyzeAlterTableRename(qualified, ast, true); + } break; + } case HiveParser.TOK_ALTERINDEX_REBUILD: analyzeAlterIndexRebuild(ast); break; @@ -499,12 +483,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { case HiveParser.TOK_REVOKE: analyzeRevoke(ast); break; - case HiveParser.TOK_ALTERTABLE_SKEWED: - analyzeAltertableSkewedby(ast); - break; - case HiveParser.TOK_EXCHANGEPARTITION: - analyzeExchangePartition(ast); - break; case HiveParser.TOK_SHOW_SET_ROLE: analyzeSetShowRole(ast); break; @@ -516,20 +494,14 @@ public 
void analyzeInternal(ASTNode ast) throws SemanticException { } } - private void analyzeAlterTableUpdateStats(ASTNode ast, TablePartition tblPart) + private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map partSpec) throws SemanticException { - String tblName = null; - String colName = null; - Map mapProp = null; - Map partSpec = null; + String colName = getUnescapedName((ASTNode) ast.getChild(0)); + Map mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0)); + + Table tbl = getTable(tblName); String partName = null; - if (tblPart == null) { - tblName = getUnescapedName((ASTNode) ast.getChild(0)); - colName = getUnescapedName((ASTNode) ast.getChild(1)); - mapProp = getProps((ASTNode) (ast.getChild(2)).getChild(0)); - } else { - tblName = tblPart.tableName; - partSpec = tblPart.partSpec; + if (partSpec != null) { try { partName = Warehouse.makePartName(partSpec, false); } catch (MetaException e) { @@ -537,15 +509,6 @@ private void analyzeAlterTableUpdateStats(ASTNode ast, TablePartition tblPart) throw new SemanticException("partition " + partSpec.toString() + " not found"); } - colName = getUnescapedName((ASTNode) ast.getChild(0)); - mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0)); - } - - Table tbl = null; - try { - tbl = db.getTable(tblName); - } catch (HiveException e) { - throw new SemanticException("table " + tbl + " not found"); } String colType = null; @@ -711,12 +674,12 @@ private void analyzeAlterDatabaseOwner(ASTNode ast) throws SemanticException { addAlterDbDesc(alterDesc); } - private void analyzeExchangePartition(ASTNode ast) throws SemanticException { - Table destTable = getTable(getUnescapedName((ASTNode)ast.getChild(0))); - Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(2))); + private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException { + Table destTable = getTable(qualified); + Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); // Get the partition specs - Map partSpecs = getPartSpec((ASTNode) ast.getChild(1)); + Map partSpecs = getPartSpec((ASTNode) ast.getChild(0)); validatePartitionValues(partSpecs); boolean sameColumns = MetaStoreUtils.compareFieldColumns( destTable.getAllCols(), sourceTable.getAllCols()); @@ -1237,8 +1200,7 @@ private void analyzeAlterIndexProps(ASTNode ast) if (indexTableName != null) { indexTbl = getTable(Utilities.getDbTableName(index.getDbName(), indexTableName)); } - String baseTblName = index.getOrigTableName(); - Table baseTbl = getTable(baseTblName); + Table baseTbl = getTable(new String[] {index.getDbName(), index.getOrigTableName()}); String handlerCls = index.getIndexHandlerClass(); HiveIndexHandler handler = HiveUtils.getIndexHandler(conf, handlerCls); @@ -1331,16 +1293,16 @@ private void validateAlterTableType(Table tbl, AlterTableTypes op, boolean expec } } - private void analyzeAlterTableProps(ASTNode ast, boolean expectView, boolean isUnset) + private void analyzeAlterTableProps(String[] qualified, ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException { - String tableName = getUnescapedName((ASTNode) ast.getChild(0)); - HashMap mapProp = getProps((ASTNode) (ast.getChild(1)) + String tableName = getDotName(qualified); + HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) .getChild(0)); AlterTableDesc alterTblDesc = null; if (isUnset == true) { alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, expectView); - if (ast.getChild(2) != null) { + if (ast.getChild(1) != null) { 
alterTblDesc.setDropIfExists(true); } } else { @@ -1527,7 +1489,7 @@ private void analyzeAlterTableProtectMode(ASTNode ast, String tableName, alterTblDesc), conf)); } - private void analyzeAlterTablePartMergeFiles(ASTNode tablePartAST, ASTNode ast, + private void analyzeAlterTablePartMergeFiles(ASTNode ast, String tableName, HashMap partSpec) throws SemanticException { AlterTablePartMergeFilesDesc mergeDesc = new AlterTablePartMergeFilesDesc( @@ -1639,7 +1601,7 @@ private void analyzeAlterTablePartMergeFiles(ASTNode tablePartAST, ASTNode ast, StatsWork statDesc; if (oldTblPartLoc.equals(newTblPartLoc)) { // If we're merging to the same location, we can avoid some metastore calls - tableSpec tablepart = new tableSpec(this.db, conf, tablePartAST); + tableSpec tablepart = new tableSpec(db, conf, tableName, partSpec); statDesc = new StatsWork(tablepart); } else { statDesc = new StatsWork(ltd); @@ -1672,7 +1634,7 @@ private void analyzeAlterTableClusterSort(ASTNode ast, String tableName, alterTblDesc = new AlterTableDesc(tableName, true, partSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf)); break; - case HiveParser.TOK_TABLEBUCKETS: + case HiveParser.TOK_ALTERTABLE_BUCKETS: ASTNode buckets = (ASTNode) ast.getChild(0); List bucketCols = getColumnNames((ASTNode) buckets.getChild(0)); List sortCols = new ArrayList(); @@ -2502,9 +2464,9 @@ private void analyzeDescFunction(ASTNode ast) throws SemanticException { } - private void analyzeAlterTableRename(ASTNode ast, boolean expectView) throws SemanticException { - String[] source = getQualifiedTableName((ASTNode) ast.getChild(0)); - String[] target = getQualifiedTableName((ASTNode) ast.getChild(1)); + private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView) + throws SemanticException { + String[] target = getQualifiedTableName((ASTNode) ast.getChild(0)); String sourceName = getDotName(source); String targetName = getDotName(target); @@ -2515,22 +2477,21 @@ private void analyzeAlterTableRename(ASTNode ast, boolean expectView) throws Sem alterTblDesc), conf)); } - private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException { - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); + private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast) throws SemanticException { String newComment = null; String newType = null; - newType = getTypeStringFromAST((ASTNode) ast.getChild(3)); + newType = getTypeStringFromAST((ASTNode) ast.getChild(2)); boolean first = false; String flagCol = null; ASTNode positionNode = null; - if (ast.getChildCount() == 6) { - newComment = unescapeSQLString(ast.getChild(4).getText()); - positionNode = (ASTNode) ast.getChild(5); - } else if (ast.getChildCount() == 5) { - if (ast.getChild(4).getType() == HiveParser.StringLiteral) { - newComment = unescapeSQLString(ast.getChild(4).getText()); + if (ast.getChildCount() == 5) { + newComment = unescapeSQLString(ast.getChild(3).getText()); + positionNode = (ASTNode) ast.getChild(4); + } else if (ast.getChildCount() == 4) { + if (ast.getChild(3).getType() == HiveParser.StringLiteral) { + newComment = unescapeSQLString(ast.getChild(3).getText()); } else { - positionNode = (ASTNode) ast.getChild(4); + positionNode = (ASTNode) ast.getChild(3); } } @@ -2542,8 +2503,8 @@ private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException { } } - String oldColName = ast.getChild(1).getText(); - String newColName = ast.getChild(2).getText(); + String oldColName = 
ast.getChild(0).getText(); + String newColName = ast.getChild(1).getText(); /* Validate the operation of renaming a column name. */ Table tab = getTable(qualified); @@ -2603,12 +2564,11 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, alterBucketNum), conf)); } - private void analyzeAlterTableModifyCols(ASTNode ast, + private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, AlterTableTypes alterType) throws SemanticException { - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); String tblName = getDotName(qualified); - List newCols = getColumns((ASTNode) ast.getChild(1)); + List newCols = getColumns((ASTNode) ast.getChild(0)); AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols, alterType); @@ -2617,7 +2577,7 @@ private void analyzeAlterTableModifyCols(ASTNode ast, alterTblDesc), conf)); } - private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView) + private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) throws SemanticException { boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null) @@ -2630,7 +2590,6 @@ private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView) // popular case but that's kinda hacky. Let's not do it for now. boolean canGroupExprs = ifExists; - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); Table tab = getTable(qualified); Map> partSpecs = getFullPartitionSpecs(ast, tab, canGroupExprs); @@ -2649,10 +2608,8 @@ private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView) rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf)); } - private void analyzeAlterTablePartColType(ASTNode ast) + private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) throws SemanticException { - // get table name - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); // check if table exists. @@ -2664,7 +2621,7 @@ private void analyzeAlterTablePartColType(ASTNode ast) // Alter table ... partition column ( column newtype) only takes one column at a time. // It must have a column name followed with type. - ASTNode colAst = (ASTNode) ast.getChild(1); + ASTNode colAst = (ASTNode) ast.getChild(0); assert(colAst.getChildCount() == 2); FieldSchema newCol = new FieldSchema(); @@ -2710,12 +2667,11 @@ private void analyzeAlterTablePartColType(ASTNode ast) * @throws SemanticException * Parsing failed */ - private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView) + private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView) throws SemanticException { // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+) - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); - boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS; + boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS; Table tab = getTable(qualified); boolean isView = tab.isView(); @@ -2723,7 +2679,7 @@ private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView) outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED)); int numCh = ast.getChildCount(); - int start = ifNotExists ? 2 : 1; + int start = ifNotExists ? 
1 : 0; String currentLocation = null; Map currentPart = null; @@ -2840,9 +2796,8 @@ private Partition getPartitionForOutput(Table tab, Map currentPa * @throws SemanticException * Parsin failed */ - private void analyzeAlterTableTouch(CommonTree ast) + private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) throws SemanticException { - String[] qualified = getQualifiedTableName((ASTNode)ast.getChild(0)); Table tab = getTable(qualified); validateAlterTableType(tab, AlterTableTypes.TOUCH); @@ -2870,14 +2825,13 @@ private void analyzeAlterTableTouch(CommonTree ast) } } - private void analyzeAlterTableArchive(CommonTree ast, boolean isUnArchive) + private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive) throws SemanticException { if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) { throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); } - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); // partition name to value List> partSpecs = getPartitionSpecs(ast); @@ -2948,7 +2902,7 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { List> partSpecs = new ArrayList>(); int childIndex = 0; // get partition metadata if partition specified - for (childIndex = 1; childIndex < ast.getChildCount(); childIndex++) { + for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { Tree partspec = ast.getChild(childIndex); // sanity check if (partspec.getType() == HiveParser.TOK_PARTSPEC) { @@ -2976,7 +2930,7 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { Map> result = new HashMap>(); - for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) { + for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) { Tree partSpecTree = ast.getChild(childIndex); if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue; ExprNodeGenericFuncDesc expr = null; @@ -3184,14 +3138,13 @@ private void addTableDropPartsOutputs(Table tab, * node * @throws SemanticException */ - private void analyzeAltertableSkewedby(ASTNode ast) throws SemanticException { + private void analyzeAltertableSkewedby(String[] qualified, ASTNode ast) throws SemanticException { /** * Throw an error if the user tries to use the DDL with * hive.internal.ddl.list.bucketing.enable set to false. */ HiveConf hiveConf = SessionState.get().getConf(); - String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); Table tab = getTable(qualified); inputs.add(new ReadEntity(tab)); @@ -3200,7 +3153,7 @@ private void analyzeAltertableSkewedby(ASTNode ast) throws SemanticException { validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY); String tableName = getDotName(qualified); - if (ast.getChildCount() == 1) { + if (ast.getChildCount() == 0) { /* Convert a skewed table to non-skewed table. 
*/ AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true, new ArrayList(), new ArrayList>()); @@ -3208,7 +3161,7 @@ private void analyzeAltertableSkewedby(ASTNode ast) throws SemanticException { rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf)); } else { - switch (((ASTNode) ast.getChild(1)).getToken().getType()) { + switch (((ASTNode) ast.getChild(0)).getToken().getType()) { case HiveParser.TOK_TABLESKEWED: handleAlterTableSkewedBy(ast, tableName, tab); break; @@ -3255,7 +3208,7 @@ private void handleAlterTableSkewedBy(ASTNode ast, String tableName, Table tab) List skewedColNames = new ArrayList(); List> skewedValues = new ArrayList>(); /* skewed column names. */ - ASTNode skewedNode = (ASTNode) ast.getChild(1); + ASTNode skewedNode = (ASTNode) ast.getChild(0); skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, skewedNode); /* skewed value. */ analyzeDDLSkewedValues(skewedValues, skewedNode); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index a76cad7..bf5c873 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -126,11 +126,10 @@ TOK_CREATEINDEX; TOK_CREATEINDEX_INDEXTBLNAME; TOK_DEFERRED_REBUILDINDEX; TOK_DROPINDEX; -TOK_DROPTABLE_PROPERTIES; TOK_LIKETABLE; TOK_DESCTABLE; TOK_DESCFUNCTION; -TOK_ALTERTABLE_PARTITION; +TOK_ALTERTABLE; TOK_ALTERTABLE_RENAME; TOK_ALTERTABLE_ADDCOLS; TOK_ALTERTABLE_RENAMECOL; @@ -152,6 +151,13 @@ TOK_ALTERTABLE_FILEFORMAT; TOK_ALTERTABLE_LOCATION; TOK_ALTERTABLE_PROPERTIES; TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION; +TOK_ALTERTABLE_DROPPROPERTIES; +TOK_ALTERTABLE_SKEWED; +TOK_ALTERTABLE_EXCHANGEPARTITION; +TOK_ALTERTABLE_SKEWED_LOCATION; +TOK_ALTERTABLE_BUCKETS; +TOK_ALTERTABLE_CLUSTER_SORT; +TOK_ALTERTABLE_COMPACT; TOK_ALTERINDEX_REBUILD; TOK_ALTERINDEX_PROPERTIES; TOK_MSCK; @@ -177,7 +183,6 @@ TOK_TABCOLLIST; TOK_TABCOL; TOK_TABLECOMMENT; TOK_TABLEPARTCOLS; -TOK_TABLEBUCKETS; TOK_TABLEROWFORMAT; TOK_TABLEROWFORMATFIELD; TOK_TABLEROWFORMATCOLLITEMS; @@ -192,7 +197,6 @@ TOK_DISABLE; TOK_READONLY; TOK_NO_DROP; TOK_STORAGEHANDLER; -TOK_ALTERTABLE_CLUSTER_SORT; TOK_NOT_CLUSTERED; TOK_NOT_SORTED; TOK_TABCOLNAME; @@ -215,9 +219,9 @@ TOK_DROPMACRO; TOK_TEMPORARY; TOK_CREATEVIEW; TOK_DROPVIEW; -TOK_ALTERVIEW_AS; +TOK_ALTERVIEW; TOK_ALTERVIEW_PROPERTIES; -TOK_DROPVIEW_PROPERTIES; +TOK_ALTERVIEW_DROPPROPERTIES; TOK_ALTERVIEW_ADDPARTS; TOK_ALTERVIEW_DROPPARTS; TOK_ALTERVIEW_RENAME; @@ -302,8 +306,6 @@ TOK_TABLESKEWED; TOK_TABCOLVALUE; TOK_TABCOLVALUE_PAIR; TOK_TABCOLVALUES; -TOK_ALTERTABLE_SKEWED; -TOK_ALTERTBLPART_SKEWED_LOCATION; TOK_SKEWED_LOCATIONS; TOK_SKEWED_LOCATION_LIST; TOK_SKEWED_LOCATION_MAP; @@ -315,7 +317,6 @@ TOK_WINDOWSPEC; TOK_WINDOWVALUES; TOK_WINDOWRANGE; TOK_IGNOREPROTECTION; -TOK_EXCHANGEPARTITION; TOK_SUBQUERY_EXPR; TOK_SUBQUERY_OP; TOK_SUBQUERY_OP_NOTIN; @@ -328,7 +329,6 @@ TOK_FILE; TOK_JAR; TOK_RESOURCE_URI; TOK_RESOURCE_LIST; -TOK_COMPACT; TOK_SHOW_COMPACTIONS; TOK_SHOW_TRANSACTIONS; } @@ -921,56 +921,62 @@ dropTableStatement alterStatement @init { pushMsg("alter statement", state); } @after { popMsg(state); } - : KW_ALTER! - ( - KW_TABLE! alterTableStatementSuffix - | - KW_VIEW! alterViewStatementSuffix - | - KW_INDEX! alterIndexStatementSuffix - | - (KW_DATABASE|KW_SCHEMA)! 
alterDatabaseStatementSuffix - ) + : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix) + | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix) + | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix + | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix ; alterTableStatementSuffix @init { pushMsg("alter table statement", state); } @after { popMsg(state); } - : alterStatementSuffixRename + : alterStatementSuffixRename[true] | alterStatementSuffixAddCol | alterStatementSuffixRenameCol | alterStatementSuffixUpdateStatsCol - | alterStatementSuffixDropPartitions - | alterStatementSuffixAddPartitions + | alterStatementSuffixDropPartitions[true] + | alterStatementSuffixAddPartitions[true] | alterStatementSuffixTouch | alterStatementSuffixArchive | alterStatementSuffixUnArchive | alterStatementSuffixProperties - | alterTblPartitionStatement | alterStatementSuffixSkewedby | alterStatementSuffixExchangePartition | alterStatementPartitionKeyType + | partitionSpec? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix partitionSpec? ; +alterTblPartitionStatementSuffix +@init {pushMsg("alter table partition statement suffix", state);} +@after {popMsg(state);} + : alterStatementSuffixFileFormat + | alterStatementSuffixLocation + | alterStatementSuffixProtectMode + | alterStatementSuffixMergeFiles + | alterStatementSuffixSerdeProperties + | alterStatementSuffixRenamePart + | alterStatementSuffixBucketNum + | alterTblPartitionStatementSuffixSkewedLocation + | alterStatementSuffixClusterbySortby + | alterStatementSuffixCompact + | alterStatementSuffixUpdateStatsCol + ; + alterStatementPartitionKeyType @init {msgs.push("alter partition key type"); } @after {msgs.pop();} - : tableName KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN - -> ^(TOK_ALTERTABLE_PARTCOLTYPE tableName columnNameType) + : KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN + -> ^(TOK_ALTERTABLE_PARTCOLTYPE columnNameType) ; alterViewStatementSuffix @init { pushMsg("alter view statement", state); } @after { popMsg(state); } : alterViewSuffixProperties - | alterStatementSuffixRename - -> ^(TOK_ALTERVIEW_RENAME alterStatementSuffixRename) - | alterStatementSuffixAddPartitions - -> ^(TOK_ALTERVIEW_ADDPARTS alterStatementSuffixAddPartitions) - | alterStatementSuffixDropPartitions - -> ^(TOK_ALTERVIEW_DROPPARTS alterStatementSuffixDropPartitions) - | name=tableName KW_AS selectStatementWithCTE - -> ^(TOK_ALTERVIEW_AS $name selectStatementWithCTE) + | alterStatementSuffixRename[false] + | alterStatementSuffixAddPartitions[false] + | alterStatementSuffixDropPartitions[false] + | selectStatementWithCTE ; alterIndexStatementSuffix @@ -1008,33 +1014,34 @@ alterDatabaseSuffixSetOwner -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName) ; -alterStatementSuffixRename +alterStatementSuffixRename[boolean table] @init { pushMsg("rename statement", state); } @after { popMsg(state); } - : oldName=tableName KW_RENAME KW_TO newName=tableName - -> ^(TOK_ALTERTABLE_RENAME $oldName $newName) + : KW_RENAME KW_TO tableName + -> { table }? ^(TOK_ALTERTABLE_RENAME tableName) + -> ^(TOK_ALTERVIEW_RENAME tableName) ; alterStatementSuffixAddCol @init { pushMsg("add column statement", state); } @after { popMsg(state); } - : tableName (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN - -> {$add != null}? 
^(TOK_ALTERTABLE_ADDCOLS tableName columnNameTypeList) - -> ^(TOK_ALTERTABLE_REPLACECOLS tableName columnNameTypeList) + : (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN + -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS columnNameTypeList) + -> ^(TOK_ALTERTABLE_REPLACECOLS columnNameTypeList) ; alterStatementSuffixRenameCol @init { pushMsg("rename column name", state); } @after { popMsg(state); } - : tableName KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition? - ->^(TOK_ALTERTABLE_RENAMECOL tableName $oldName $newName colType $comment? alterStatementChangeColPosition?) + : KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition? + ->^(TOK_ALTERTABLE_RENAMECOL $oldName $newName colType $comment? alterStatementChangeColPosition?) ; alterStatementSuffixUpdateStatsCol @init { pushMsg("update column statistics", state); } @after { popMsg(state); } - : identifier KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)? - ->^(TOK_ALTERTABLE_UPDATECOLSTATS identifier $colName tableProperties $comment?) + : KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)? + ->^(TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties $comment?) ; alterStatementChangeColPosition @@ -1043,11 +1050,12 @@ alterStatementChangeColPosition -> ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol) ; -alterStatementSuffixAddPartitions +alterStatementSuffixAddPartitions[boolean table] @init { pushMsg("add partition statement", state); } @after { popMsg(state); } - : tableName KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+ - -> ^(TOK_ALTERTABLE_ADDPARTS tableName ifNotExists? alterStatementSuffixAddPartitionsElement+) + : KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+ + -> { table }? ^(TOK_ALTERTABLE_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+) + -> ^(TOK_ALTERVIEW_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+) ; alterStatementSuffixAddPartitionsElement @@ -1057,22 +1065,22 @@ alterStatementSuffixAddPartitionsElement alterStatementSuffixTouch @init { pushMsg("touch statement", state); } @after { popMsg(state); } - : tableName KW_TOUCH (partitionSpec)* - -> ^(TOK_ALTERTABLE_TOUCH tableName (partitionSpec)*) + : KW_TOUCH (partitionSpec)* + -> ^(TOK_ALTERTABLE_TOUCH (partitionSpec)*) ; alterStatementSuffixArchive @init { pushMsg("archive statement", state); } @after { popMsg(state); } - : tableName KW_ARCHIVE (partitionSpec)* - -> ^(TOK_ALTERTABLE_ARCHIVE tableName (partitionSpec)*) + : KW_ARCHIVE (partitionSpec)* + -> ^(TOK_ALTERTABLE_ARCHIVE (partitionSpec)*) ; alterStatementSuffixUnArchive @init { pushMsg("unarchive statement", state); } @after { popMsg(state); } - : tableName KW_UNARCHIVE (partitionSpec)* - -> ^(TOK_ALTERTABLE_UNARCHIVE tableName (partitionSpec)*) + : KW_UNARCHIVE (partitionSpec)* + -> ^(TOK_ALTERTABLE_UNARCHIVE (partitionSpec)*) ; partitionLocation @@ -1082,29 +1090,30 @@ partitionLocation KW_LOCATION locn=StringLiteral -> ^(TOK_PARTITIONLOCATION $locn) ; -alterStatementSuffixDropPartitions +alterStatementSuffixDropPartitions[boolean table] @init { pushMsg("drop partition statement", state); } @after { popMsg(state); } - : tableName KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection? 
- -> ^(TOK_ALTERTABLE_DROPPARTS tableName dropPartitionSpec+ ifExists? ignoreProtection?) + : KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection? + -> { table }? ^(TOK_ALTERTABLE_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?) + -> ^(TOK_ALTERVIEW_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?) ; alterStatementSuffixProperties @init { pushMsg("alter properties statement", state); } @after { popMsg(state); } - : tableName KW_SET KW_TBLPROPERTIES tableProperties - -> ^(TOK_ALTERTABLE_PROPERTIES tableName tableProperties) - | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties - -> ^(TOK_DROPTABLE_PROPERTIES tableName tableProperties ifExists?) + : KW_SET KW_TBLPROPERTIES tableProperties + -> ^(TOK_ALTERTABLE_PROPERTIES tableProperties) + | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties + -> ^(TOK_ALTERTABLE_DROPPROPERTIES tableProperties ifExists?) ; alterViewSuffixProperties @init { pushMsg("alter view properties statement", state); } @after { popMsg(state); } - : tableName KW_SET KW_TBLPROPERTIES tableProperties - -> ^(TOK_ALTERVIEW_PROPERTIES tableName tableProperties) - | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties - -> ^(TOK_DROPVIEW_PROPERTIES tableName tableProperties ifExists?) + : KW_SET KW_TBLPROPERTIES tableProperties + -> ^(TOK_ALTERVIEW_PROPERTIES tableProperties) + | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties + -> ^(TOK_ALTERVIEW_DROPPROPERTIES tableProperties ifExists?) ; alterStatementSuffixSerdeProperties @@ -1123,29 +1132,6 @@ tablePartitionPrefix ->^(TOK_TABLE_PARTITION tableName partitionSpec?) ; -alterTblPartitionStatement -@init {pushMsg("alter table partition statement", state);} -@after {popMsg(state);} - : tablePartitionPrefix alterTblPartitionStatementSuffix - -> ^(TOK_ALTERTABLE_PARTITION tablePartitionPrefix alterTblPartitionStatementSuffix) - ; - -alterTblPartitionStatementSuffix -@init {pushMsg("alter table partition statement suffix", state);} -@after {popMsg(state);} - : alterStatementSuffixFileFormat - | alterStatementSuffixLocation - | alterStatementSuffixProtectMode - | alterStatementSuffixMergeFiles - | alterStatementSuffixSerdeProperties - | alterStatementSuffixRenamePart - | alterStatementSuffixStatsPart - | alterStatementSuffixBucketNum - | alterTblPartitionStatementSuffixSkewedLocation - | alterStatementSuffixClusterbySortby - | alterStatementSuffixCompact - ; - alterStatementSuffixFileFormat @init {pushMsg("alter fileformat statement", state); } @after {popMsg(state);} @@ -1165,7 +1151,7 @@ alterTblPartitionStatementSuffixSkewedLocation @init {pushMsg("alter partition skewed location", state);} @after {popMsg(state);} : KW_SET KW_SKEWED KW_LOCATION skewedLocations - -> ^(TOK_ALTERTBLPART_SKEWED_LOCATION skewedLocations) + -> ^(TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations) ; skewedLocations @@ -1200,21 +1186,21 @@ alterStatementSuffixLocation alterStatementSuffixSkewedby @init {pushMsg("alter skewed by statement", state);} @after{popMsg(state);} - : tableName tableSkewed - ->^(TOK_ALTERTABLE_SKEWED tableName tableSkewed) + : tableSkewed + ->^(TOK_ALTERTABLE_SKEWED tableSkewed) | - tableName KW_NOT KW_SKEWED - ->^(TOK_ALTERTABLE_SKEWED tableName) + KW_NOT KW_SKEWED + ->^(TOK_ALTERTABLE_SKEWED) | - tableName KW_NOT storedAsDirs - ->^(TOK_ALTERTABLE_SKEWED tableName storedAsDirs) + KW_NOT storedAsDirs + ->^(TOK_ALTERTABLE_SKEWED storedAsDirs) ; alterStatementSuffixExchangePartition @init {pushMsg("alter exchange partition", state);} @after{popMsg(state);} - : 
tableName KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName - -> ^(TOK_EXCHANGEPARTITION tableName partitionSpec $exchangename) + : KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName + -> ^(TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename) ; alterStatementSuffixProtectMode @@ -1264,14 +1250,14 @@ alterStatementSuffixBucketNum @init { pushMsg("", state); } @after { popMsg(state); } : KW_INTO num=Number KW_BUCKETS - -> ^(TOK_TABLEBUCKETS $num) + -> ^(TOK_ALTERTABLE_BUCKETS $num) ; alterStatementSuffixCompact @init { msgs.push("compaction request"); } @after { msgs.pop(); } : KW_COMPACT compactType=StringLiteral - -> ^(TOK_COMPACT $compactType) + -> ^(TOK_ALTERTABLE_COMPACT $compactType) ; @@ -1693,7 +1679,7 @@ tableBuckets @after { popMsg(state); } : KW_CLUSTERED KW_BY LPAREN bucketCols=columnNameList RPAREN (KW_SORTED KW_BY LPAREN sortCols=columnNameOrderList RPAREN)? KW_INTO num=Number KW_BUCKETS - -> ^(TOK_TABLEBUCKETS $bucketCols $sortCols? $num) + -> ^(TOK_ALTERTABLE_BUCKETS $bucketCols $sortCols? $num) ; tableSkewed diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java index 8527239..2b239ab 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java @@ -88,6 +88,7 @@ private void doIndexUpdate(List tblIndexes) throws HiveException { sb.append("ALTER INDEX "); sb.append(idx.getIndexName()); sb.append(" ON "); + sb.append(idx.getDbName()).append('.'); sb.append(idx.getOrigTableName()); sb.append(" REBUILD"); driver.compile(sb.toString(), false); @@ -125,6 +126,7 @@ private void doIndexUpdate(Index index, Map partSpec) throws sb.append("ALTER INDEX "); sb.append(index.getIndexName()); sb.append(" ON "); + sb.append(index.getDbName()).append('.'); sb.append(index.getOrigTableName()); sb.append(" PARTITION "); sb.append(ps.toString()); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 7a71ec7..adb0c2f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -9402,7 +9402,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { // analyze create view command if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW || - ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) { + (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) { child = analyzeCreateView(ast, qb); SessionState.get().setCommandType(HiveOperation.CREATEVIEW); if (child == null) { @@ -10090,7 +10090,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb) case HiveParser.TOK_TABLEPARTCOLS: partCols = getColumns((ASTNode) child.getChild(0), false); break; - case HiveParser.TOK_TABLEBUCKETS: + case HiveParser.TOK_ALTERTABLE_BUCKETS: bucketCols = getColumnNames((ASTNode) child.getChild(0)); if (child.getChildCount() == 2) { numBuckets = (Integer.valueOf(child.getChild(1).getText())) @@ -10313,7 +10313,8 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb) throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE."); } - if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) { + if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && + ast.getChild(1).getType() == HiveParser.TOK_QUERY) { isAlterViewAs = true; orReplace = true; } diff 
--git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 3dfce99..026efe8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -20,6 +20,7 @@ import java.util.HashMap; +import org.antlr.runtime.tree.Tree; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; @@ -57,7 +58,7 @@ commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE); commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE); commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES); - commandType.put(HiveParser.TOK_DROPTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES); + commandType.put(HiveParser.TOK_ALTERTABLE_DROPPROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES); commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES); commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES); commandType.put(HiveParser.TOK_SHOWCOLUMNS, HiveOperation.SHOWCOLUMNS); @@ -81,9 +82,11 @@ commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, HiveOperation.ALTERINDEX_REBUILD); commandType.put(HiveParser.TOK_ALTERINDEX_PROPERTIES, HiveOperation.ALTERINDEX_PROPS); commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES); - commandType.put(HiveParser.TOK_DROPVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES); + commandType.put(HiveParser.TOK_ALTERVIEW_DROPPROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES); commandType.put(HiveParser.TOK_ALTERVIEW_ADDPARTS, HiveOperation.ALTERTABLE_ADDPARTS); commandType.put(HiveParser.TOK_ALTERVIEW_DROPPARTS, HiveOperation.ALTERTABLE_DROPPARTS); + commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME); + commandType.put(HiveParser.TOK_ALTERVIEW, HiveOperation.ALTERVIEW_AS); commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY); commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE); commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE); @@ -105,11 +108,9 @@ commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE); commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED); commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE); - commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME); commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE); commandType.put(HiveParser.TOK_SHOW_COMPACTIONS, HiveOperation.SHOW_COMPACTIONS); commandType.put(HiveParser.TOK_SHOW_TRANSACTIONS, HiveOperation.SHOW_TRANSACTIONS); - commandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS, HiveOperation.ALTERTABLE_UPDATETABLESTATS); } static { @@ -134,17 +135,20 @@ HiveOperation.ALTERPARTITION_SERDEPROPERTIES }); tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_RENAMEPART, new HiveOperation[] {null, HiveOperation.ALTERTABLE_RENAMEPART}); - tablePartitionCommandType.put(HiveParser.TOK_COMPACT, + tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_COMPACT, new HiveOperation[] {HiveOperation.ALTERTABLE_COMPACT, HiveOperation.ALTERTABLE_COMPACT}); - tablePartitionCommandType.put(HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION, + tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION, new 
HiveOperation[] {HiveOperation.ALTERTBLPART_SKEWED_LOCATION, HiveOperation.ALTERTBLPART_SKEWED_LOCATION }); - tablePartitionCommandType.put(HiveParser.TOK_TABLEBUCKETS, + tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_BUCKETS, new HiveOperation[] {HiveOperation.ALTERTABLE_BUCKETNUM, HiveOperation.ALTERPARTITION_BUCKETNUM}); tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT, new HiveOperation[] {HiveOperation.ALTERTABLE_CLUSTER_SORT, HiveOperation.ALTERTABLE_CLUSTER_SORT}); + tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS, + new HiveOperation[] {HiveOperation.ALTERTABLE_UPDATETABLESTATS, + HiveOperation.ALTERTABLE_UPDATEPARTSTATS}); } public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) @@ -152,9 +156,9 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) if (tree.getToken() == null) { throw new RuntimeException("Empty Syntax Tree"); } else { - setSessionCommandType(commandType.get(tree.getToken().getType())); + setSessionCommandType(commandType.get(tree.getType())); - switch (tree.getToken().getType()) { + switch (tree.getType()) { case HiveParser.TOK_EXPLAIN: return new ExplainSemanticAnalyzer(conf); case HiveParser.TOK_EXPLAIN_SQ_REWRITE: @@ -165,6 +169,47 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) return new ExportSemanticAnalyzer(conf); case HiveParser.TOK_IMPORT: return new ImportSemanticAnalyzer(conf); + case HiveParser.TOK_ALTERTABLE: { + Tree child = tree.getChild(1); + switch (child.getType()) { + case HiveParser.TOK_ALTERTABLE_RENAME: + case HiveParser.TOK_ALTERTABLE_TOUCH: + case HiveParser.TOK_ALTERTABLE_ARCHIVE: + case HiveParser.TOK_ALTERTABLE_UNARCHIVE: + case HiveParser.TOK_ALTERTABLE_ADDCOLS: + case HiveParser.TOK_ALTERTABLE_RENAMECOL: + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: + case HiveParser.TOK_ALTERTABLE_DROPPARTS: + case HiveParser.TOK_ALTERTABLE_ADDPARTS: + case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE: + case HiveParser.TOK_ALTERTABLE_PROPERTIES: + case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES: + case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION: + case HiveParser.TOK_ALTERTABLE_SKEWED: + setSessionCommandType(commandType.get(child.getType())); + return new DDLSemanticAnalyzer(conf); + } + HiveOperation commandType = + tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 
1 : 0]; + setSessionCommandType(commandType); + return new DDLSemanticAnalyzer(conf); + } + case HiveParser.TOK_ALTERVIEW: { + Tree child = tree.getChild(1); + switch (child.getType()) { + case HiveParser.TOK_ALTERVIEW_PROPERTIES: + case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES: + case HiveParser.TOK_ALTERVIEW_ADDPARTS: + case HiveParser.TOK_ALTERVIEW_DROPPARTS: + case HiveParser.TOK_ALTERVIEW_RENAME: + setSessionCommandType(commandType.get(child.getType())); + return new DDLSemanticAnalyzer(conf); + } + // TOK_ALTERVIEW_AS + assert child.getType() == HiveParser.TOK_QUERY; + setSessionCommandType(HiveOperation.ALTERVIEW_AS); + return new SemanticAnalyzer(conf); + } case HiveParser.TOK_CREATEDATABASE: case HiveParser.TOK_DROPDATABASE: case HiveParser.TOK_SWITCHDATABASE: @@ -174,24 +219,8 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_DESCFUNCTION: case HiveParser.TOK_MSCK: - case HiveParser.TOK_ALTERTABLE_ADDCOLS: - case HiveParser.TOK_ALTERTABLE_RENAMECOL: - case HiveParser.TOK_ALTERTABLE_REPLACECOLS: - case HiveParser.TOK_ALTERTABLE_RENAME: - case HiveParser.TOK_ALTERTABLE_DROPPARTS: - case HiveParser.TOK_ALTERTABLE_ADDPARTS: - case HiveParser.TOK_ALTERTABLE_PROPERTIES: - case HiveParser.TOK_DROPTABLE_PROPERTIES: - case HiveParser.TOK_ALTERTABLE_SERIALIZER: - case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: - case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE: case HiveParser.TOK_ALTERINDEX_REBUILD: case HiveParser.TOK_ALTERINDEX_PROPERTIES: - case HiveParser.TOK_ALTERVIEW_PROPERTIES: - case HiveParser.TOK_DROPVIEW_PROPERTIES: - case HiveParser.TOK_ALTERVIEW_ADDPARTS: - case HiveParser.TOK_ALTERVIEW_DROPPARTS: - case HiveParser.TOK_ALTERVIEW_RENAME: case HiveParser.TOK_SHOWDATABASES: case HiveParser.TOK_SHOWTABLES: case HiveParser.TOK_SHOWCOLUMNS: @@ -209,9 +238,6 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_CREATEINDEX: case HiveParser.TOK_DROPINDEX: case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: - case HiveParser.TOK_ALTERTABLE_TOUCH: - case HiveParser.TOK_ALTERTABLE_ARCHIVE: - case HiveParser.TOK_ALTERTABLE_UNARCHIVE: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: case HiveParser.TOK_LOCKDB: @@ -228,23 +254,8 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_SHOW_ROLES: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: case HiveParser.TOK_ALTERDATABASE_OWNER: - case HiveParser.TOK_ALTERTABLE_SKEWED: case HiveParser.TOK_TRUNCATETABLE: - case HiveParser.TOK_EXCHANGEPARTITION: case HiveParser.TOK_SHOW_SET_ROLE: - case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS: - return new DDLSemanticAnalyzer(conf); - case HiveParser.TOK_ALTERTABLE_PARTITION: - HiveOperation commandType = null; - Integer type = ((ASTNode) tree.getChild(1)).getToken().getType(); - if (type == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) { - commandType = HiveOperation.ALTERTABLE_UPDATEPARTSTATS; - } else if (tree.getChild(0).getChildCount() > 1) { - commandType = tablePartitionCommandType.get(type)[1]; - } else { - commandType = tablePartitionCommandType.get(type)[0]; - } - setSessionCommandType(commandType); return new DDLSemanticAnalyzer(conf); case HiveParser.TOK_CREATEFUNCTION: diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java index 20d863b..8517319 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java @@ -44,11 +44,19 @@ * */ public static enum AlterTableTypes { - RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, DROPPROPS, ADDSERDE, ADDSERDEPROPS, - ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION, - TOUCH, ARCHIVE, UNARCHIVE, ALTERPROTECTMODE, ALTERPARTITIONPROTECTMODE, - ALTERLOCATION, DROPPARTITION, RENAMEPARTITION, ADDSKEWEDBY, ALTERSKEWEDLOCATION, - ALTERBUCKETNUM, ALTERPARTITION, COMPACT + RENAME("rename"), ADDCOLS("add columns"), REPLACECOLS("replace columns"), + ADDPROPS("add props"), DROPPROPS("drop props"), ADDSERDE("add serde"), ADDSERDEPROPS("add serde props"), + ADDFILEFORMAT("add fileformat"), ADDCLUSTERSORTCOLUMN("add cluster sort column"), + RENAMECOLUMN("rename column"), ADDPARTITION("add partition"), TOUCH("touch"), ARCHIVE("archive"), + UNARCHIVE("unarchive"), ALTERPROTECTMODE("alter protect mode"), + ALTERPARTITIONPROTECTMODE("alter partition protect mode"), ALTERLOCATION("alter location"), + DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"), + ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"), + ALTERPARTITION("alter partition"), COMPACT("compact"); + + private final String name; + private AlterTableTypes(String name) { this.name = name; } + public String getName() { return name; } } public static enum ProtectModeType { @@ -236,16 +244,7 @@ public AlterTableDesc(String tableName, HashMap partSpec, int nu @Explain(displayName = "type") public String getAlterTableTypeString() { - switch (op) { - case RENAME: - return "rename"; - case ADDCOLS: - return "add columns"; - case REPLACECOLS: - return "replace columns"; - } - - return "unknown"; + return op.getName(); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java index 67be666..75cdf16 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java @@ -114,8 +114,8 @@ new Privilege[] {Privilege.ALTER_DATA}, null), ALTERTABLE_PARTCOLTYPE("ALTERTABLE_PARTCOLTYPE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }), ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null), - ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT}, - new Privilege[]{Privilege.ALTER_DATA}), + ALTERVIEW_AS("ALTERVIEW_AS", new Privilege[] {Privilege.ALTER_METADATA}, null), + ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA}), SHOW_COMPACTIONS("SHOW COMPACTIONS", null, null), SHOW_TRANSACTIONS("SHOW TRANSACTIONS", null, null); ; diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index 29ae4a0..c4469a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -114,6 +114,7 @@ ALTERTABLE_SKEWED, ALTERTBLPART_SKEWED_LOCATION, ALTERVIEW_RENAME, + ALTERVIEW_AS, ALTERTABLE_COMPACT, SHOW_COMPACTIONS, SHOW_TRANSACTIONS, diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index 45404fe..f1fbf9f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -246,6 +246,8 @@ public HivePrivilegeObjectType getObjectType() { (OWNER_PRIV_AR, OWNER_PRIV_AR)); op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, PrivRequirement.newIOPrivRequirement (OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERVIEW_AS, PrivRequirement.newIOPrivRequirement +(OWNER_PRIV_AR, OWNER_PRIV_AR)); op2Priv.put(HiveOperationType.DROPVIEW, PrivRequirement.newIOPrivRequirement (OWNER_PRIV_AR, OWNER_PRIV_AR)); diff --git ql/src/test/queries/clientpositive/add_part_exist.q ql/src/test/queries/clientpositive/add_part_exist.q index d176661..b8ace72 100644 --- ql/src/test/queries/clientpositive/add_part_exist.q +++ ql/src/test/queries/clientpositive/add_part_exist.q @@ -18,20 +18,21 @@ SHOW TABLES; -- Test ALTER TABLE ADD PARTITION in non-default Database CREATE DATABASE add_part_test_db; -USE add_part_test_db; -SHOW TABLES; -CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING); -SHOW PARTITIONS add_part_test; +CREATE TABLE add_part_test_db.add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING); +SHOW PARTITIONS add_part_test_db.add_part_test; -ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01'); -SHOW PARTITIONS add_part_test; +ALTER TABLE add_part_test_db.add_part_test ADD PARTITION (ds='2010-01-01'); +SHOW PARTITIONS add_part_test_db.add_part_test; -ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01'); -SHOW PARTITIONS add_part_test; +ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01'); +SHOW PARTITIONS add_part_test_db.add_part_test; -ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02'); -SHOW PARTITIONS add_part_test; +ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02'); +SHOW PARTITIONS add_part_test_db.add_part_test; -ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03'); -SHOW PARTITIONS add_part_test; +ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03'); +SHOW PARTITIONS add_part_test_db.add_part_test; + +DROP TABLE add_part_test_db.add_part_test; +DROP DATABASE add_part_test_db; diff --git ql/src/test/queries/clientpositive/alter1.q ql/src/test/queries/clientpositive/alter1.q index 312a017..2fac195 100644 --- ql/src/test/queries/clientpositive/alter1.q +++ ql/src/test/queries/clientpositive/alter1.q @@ -32,40 +32,38 @@ SHOW TABLES; -- With non-default Database CREATE DATABASE alter1_db; -USE alter1_db; -SHOW TABLES; +SHOW TABLES alter1_db; -CREATE TABLE alter1(a INT, b INT); -DESCRIBE EXTENDED alter1; +CREATE TABLE alter1_db.alter1(a INT, b INT); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='3'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET 
TBLPROPERTIES ('EXTERNAL'='TRUE'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='9'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20'); +DESCRIBE EXTENDED alter1_db.alter1; add jar ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar; -ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9'); -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9'); +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'; -DESCRIBE EXTENDED alter1; +ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'; +DESCRIBE EXTENDED alter1_db.alter1; -ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string); -DESCRIBE alter1; +ALTER TABLE alter1_db.alter1 REPLACE COLUMNS (a int, b int, c string); +DESCRIBE alter1_db.alter1; -DROP TABLE alter1; -USE default; +DROP TABLE alter1_db.alter1; DROP DATABASE alter1_db; diff --git ql/src/test/queries/clientpositive/alter_char1.q ql/src/test/queries/clientpositive/alter_char1.q index d391138..b604656 100644 --- ql/src/test/queries/clientpositive/alter_char1.q +++ ql/src/test/queries/clientpositive/alter_char1.q @@ -1,34 +1,35 @@ -- SORT_QUERY_RESULTS -drop table alter_char_1; +create database ac; -create table alter_char_1 (key string, value string); -insert overwrite table alter_char_1 +create table ac.alter_char_1 (key string, value string); +insert overwrite table ac.alter_char_1 select key, value from src order by key limit 5; -select * from alter_char_1; +select * from ac.alter_char_1; -- change column to char -alter table alter_char_1 change column value value char(20); +alter table ac.alter_char_1 change column value value char(20); -- contents should still look the same -select * from alter_char_1; +select * from ac.alter_char_1; -- change column to smaller char -alter table alter_char_1 change column value value char(3); +alter table ac.alter_char_1 change column value value char(3); -- value column should be truncated now -select * from alter_char_1; +select * from ac.alter_char_1; -- change back to bigger char -alter table alter_char_1 change column value value char(20); +alter table ac.alter_char_1 change column value value char(20); -- column values should be full size again -select * from alter_char_1; +select * from ac.alter_char_1; -- add char column -alter table alter_char_1 add columns (key2 int, value2 char(10)); -select * from alter_char_1; +alter table ac.alter_char_1 add columns (key2 int, value2 char(10)); +select * from ac.alter_char_1; -insert overwrite table alter_char_1 +insert overwrite table ac.alter_char_1 select key, value, key, value from src order by key limit 5; -select * from alter_char_1; +select * from ac.alter_char_1; -drop table alter_char_1; +drop table 
ac.alter_char_1; +drop database ac; diff --git ql/src/test/queries/clientpositive/alter_index.q ql/src/test/queries/clientpositive/alter_index.q index 2aa13da..3a3d13c 100644 --- ql/src/test/queries/clientpositive/alter_index.q +++ ql/src/test/queries/clientpositive/alter_index.q @@ -1,11 +1,11 @@ drop index src_index_8 on src; -create index src_index_8 on table src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2"); +create index src_index_8 on table default.src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2"); desc extended default__src_src_index_8__; -alter index src_index_8 on src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3"); +alter index src_index_8 on default.src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3"); desc extended default__src_src_index_8__; -drop index src_index_8 on src; +drop index src_index_8 on default.src; show tables; diff --git ql/src/test/queries/clientpositive/alter_partition_coltype.q ql/src/test/queries/clientpositive/alter_partition_coltype.q index 115eaf9..8c9945c 100644 --- ql/src/test/queries/clientpositive/alter_partition_coltype.q +++ ql/src/test/queries/clientpositive/alter_partition_coltype.q @@ -51,18 +51,23 @@ desc alter_coltype partition (dt='100', ts=3.0); drop table alter_coltype; -create table alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string); +create database pt; + +create table pt.alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string); set hive.exec.dynamic.partition.mode=nonstrict; -insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5; +insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5; + +insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5; +insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5; -insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5; -insert into table alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5; +alter table pt.alterdynamic_part_table partition column (partcol1 int); -alter table alterdynamic_part_table partition column (partcol1 int); +explain extended select intcol from pt.alterdynamic_part_table where partcol1='1' and partcol2='1'; -explain extended select intcol from alterdynamic_part_table where partcol1='1' and partcol2='1'; +explain extended select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__'); +select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__'); -explain extended select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__'); -select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__'); \ No newline at end of file +drop table pt.alterdynamic_part_table; +drop database pt; diff --git ql/src/test/queries/clientpositive/alter_skewed_table.q ql/src/test/queries/clientpositive/alter_skewed_table.q index 
216bbb5..cdc4567 100644 --- ql/src/test/queries/clientpositive/alter_skewed_table.q +++ ql/src/test/queries/clientpositive/alter_skewed_table.q @@ -1,6 +1,6 @@ set hive.mapred.supports.subdirectories=true; -create table original (key STRING, value STRING); +create table original (key STRING, value STRING); describe formatted original; @@ -10,23 +10,27 @@ describe formatted original; drop table original; -create table original2 (key STRING, value STRING) ; +create database skew_test; -describe formatted original2; +create table skew_test.original2 (key STRING, value STRING) ; -alter table original2 SKEWED BY (key, value) ON ((1,1),(5,6)); +describe formatted skew_test.original2; -describe formatted original2; +alter table skew_test.original2 SKEWED BY (key, value) ON ((1,1),(5,6)); -drop table original2; +describe formatted skew_test.original2; -create table original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)); +drop table skew_test.original2; -describe formatted original3; +create table skew_test.original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)); -alter table original3 not skewed; +describe formatted skew_test.original3; -describe formatted original3; +alter table skew_test.original3 not skewed; -drop table original3; +describe formatted skew_test.original3; + +drop table skew_test.original3; + +drop database skew_test; diff --git ql/src/test/queries/clientpositive/alter_varchar1.q ql/src/test/queries/clientpositive/alter_varchar1.q index 6f644a0..8ed3d20 100644 --- ql/src/test/queries/clientpositive/alter_varchar1.q +++ ql/src/test/queries/clientpositive/alter_varchar1.q @@ -1,34 +1,35 @@ -- SORT_QUERY_RESULTS -drop table alter_varchar_1; +create database avc; -create table alter_varchar_1 (key string, value string); -insert overwrite table alter_varchar_1 +create table avc.alter_varchar_1 (key string, value string); +insert overwrite table avc.alter_varchar_1 select key, value from src order by key limit 5; -select * from alter_varchar_1; +select * from avc.alter_varchar_1; -- change column to varchar -alter table alter_varchar_1 change column value value varchar(20); +alter table avc.alter_varchar_1 change column value value varchar(20); -- contents should still look the same -select * from alter_varchar_1; +select * from avc.alter_varchar_1; -- change column to smaller varchar -alter table alter_varchar_1 change column value value varchar(3); +alter table avc.alter_varchar_1 change column value value varchar(3); -- value column should be truncated now -select * from alter_varchar_1; +select * from avc.alter_varchar_1; -- change back to bigger varchar -alter table alter_varchar_1 change column value value varchar(20); +alter table avc.alter_varchar_1 change column value value varchar(20); -- column values should be full size again -select * from alter_varchar_1; +select * from avc.alter_varchar_1; -- add varchar column -alter table alter_varchar_1 add columns (key2 int, value2 varchar(10)); -select * from alter_varchar_1; +alter table avc.alter_varchar_1 add columns (key2 int, value2 varchar(10)); +select * from avc.alter_varchar_1; -insert overwrite table alter_varchar_1 +insert overwrite table avc.alter_varchar_1 select key, value, key, value from src order by key limit 5; -select * from alter_varchar_1; +select * from avc.alter_varchar_1; -drop table alter_varchar_1; +drop table avc.alter_varchar_1; +drop database avc; diff --git ql/src/test/queries/clientpositive/alter_view_as_select.q 
ql/src/test/queries/clientpositive/alter_view_as_select.q index dcab3ca..d2519a8 100644 --- ql/src/test/queries/clientpositive/alter_view_as_select.q +++ ql/src/test/queries/clientpositive/alter_view_as_select.q @@ -1,13 +1,16 @@ -DROP VIEW testView; -CREATE VIEW testView as SELECT * FROM srcpart; -DESCRIBE FORMATTED testView; +CREATE DATABASE tv; +CREATE VIEW tv.testView as SELECT * FROM srcpart; +DESCRIBE FORMATTED tv.testView; -ALTER VIEW testView AS SELECT value FROM src WHERE key=86; -DESCRIBE FORMATTED testView; +ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86; +DESCRIBE FORMATTED tv.testView; -ALTER VIEW testView AS +ALTER VIEW tv.testView AS SELECT * FROM src WHERE key > 80 AND key < 100 ORDER BY key, value LIMIT 10; -DESCRIBE FORMATTED testView; +DESCRIBE FORMATTED tv.testView; + +DROP VIEW tv.testView; +DROP DATABASE tv; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/alter_view_rename.q ql/src/test/queries/clientpositive/alter_view_rename.q index 68cf9d6..f91fbb8 100644 --- ql/src/test/queries/clientpositive/alter_view_rename.q +++ ql/src/test/queries/clientpositive/alter_view_rename.q @@ -1,10 +1,16 @@ +CREATE DATABASE tv1; +CREATE DATABASE tv2; + CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds STRING); -CREATE VIEW view1 as SELECT * FROM invites; -DESCRIBE EXTENDED view1; +CREATE VIEW tv1.view1 as SELECT * FROM invites; +DESCRIBE EXTENDED tv1.view1; -ALTER VIEW view1 RENAME TO view2; -DESCRIBE EXTENDED view2; -SELECT * FROM view2; +ALTER VIEW tv1.view1 RENAME TO tv2.view2; +DESCRIBE EXTENDED tv2.view2; +SELECT * FROM tv2.view2; DROP TABLE invites; -DROP VIEW view2; \ No newline at end of file +DROP VIEW tv2.view2; + +DROP DATABASE tv1; +DROP DATABASE tv2; diff --git ql/src/test/queries/clientpositive/archive_multi.q ql/src/test/queries/clientpositive/archive_multi.q index 2c1a6d8..1004aca 100644 --- ql/src/test/queries/clientpositive/archive_multi.q +++ ql/src/test/queries/clientpositive/archive_multi.q @@ -1,42 +1,41 @@ set hive.archive.enabled = true; set hive.enforce.bucketing = true; -drop table tstsrc; -drop table tstsrcpart; +create database ac_test; -create table tstsrc like src; -insert overwrite table tstsrc select key, value from src; +create table ac_test.tstsrc like default.src; +insert overwrite table ac_test.tstsrc select key, value from default.src; -create table tstsrcpart like srcpart; +create table ac_test.tstsrcpart like default.srcpart; -insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') -select key, value from srcpart where ds='2008-04-08' and hr='11'; +insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11'; -insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12'; +insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12'; -insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='11') -select key, value from srcpart where ds='2008-04-09' and hr='11'; +insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-09', hr='11') +select key, value from default.srcpart where ds='2008-04-09' and hr='11'; -insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='12') -select key, value from srcpart where ds='2008-04-09' and hr='12'; +insert overwrite table ac_test.tstsrcpart partition 
(ds='2008-04-09', hr='12') +select key, value from default.srcpart where ds='2008-04-09' and hr='12'; -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19) SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2; +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2; -ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08'); +ALTER TABLE ac_test.tstsrcpart ARCHIVE PARTITION (ds='2008-04-08'); SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2; +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2; -SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key; +SELECT key, count(1) FROM ac_test.tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key; -SELECT * FROM tstsrcpart a JOIN tstsrc b ON a.key=b.key +SELECT * FROM ac_test.tstsrcpart a JOIN ac_test.tstsrc b ON a.key=b.key WHERE a.ds='2008-04-08' AND a.hr='12' AND a.key='0'; -ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08'); +ALTER TABLE ac_test.tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08'); SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2; +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2; diff --git ql/src/test/queries/clientpositive/create_or_replace_view.q ql/src/test/queries/clientpositive/create_or_replace_view.q index a8f59b7..0148224 100644 --- ql/src/test/queries/clientpositive/create_or_replace_view.q +++ ql/src/test/queries/clientpositive/create_or_replace_view.q @@ -1,32 +1,39 @@ -drop view v; -create view v as select * from srcpart; -describe formatted v; +create database vt; + +create view vt.v as select * from srcpart; +describe formatted vt.v; -- modifying definition of unpartitioned view -create or replace view v partitioned on (ds, hr) as select * from srcpart; -alter view v add partition (ds='2008-04-08',hr='11'); -alter view v add partition (ds='2008-04-08',hr='12'); -select * from v where value='val_409' and ds='2008-04-08' and hr='11'; -describe formatted v; -show partitions v; +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart; +alter view vt.v add partition (ds='2008-04-08',hr='11'); +alter view vt.v add partition (ds='2008-04-08',hr='12'); +select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11'; +describe formatted vt.v; +show partitions vt.v; + +alter view vt.v drop partition (ds='2008-04-08',hr='11'); +alter view vt.v drop partition (ds='2008-04-08',hr='12'); +show partitions vt.v; -- altering partitioned view 1 -create or replace view v partitioned on (ds, hr) as select value, ds, hr from srcpart; -select * from v where value='val_409' and ds='2008-04-08' and hr='11'; -describe formatted v; -show partitions v; +create or replace view vt.v partitioned on (ds, hr) as select value, ds, hr from srcpart; +select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11'; +describe formatted vt.v; +show partitions vt.v; -- altering partitioned view 2 -create or replace view v partitioned on (ds, hr) as select key, value, ds, hr from srcpart; -select * from v where value='val_409' and ds='2008-04-08' and hr='11'; -describe formatted v; -show partitions v; -drop view v; +create or replace view vt.v partitioned on (ds, hr) as select key, value, ds, hr from srcpart; 
+select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11'; +describe formatted vt.v; +show partitions vt.v; +drop view vt.v; -- updating to fix view with invalid definition create table srcpart_temp like srcpart; -create view v partitioned on (ds, hr) as select * from srcpart_temp; -drop table srcpart_temp; -- v is now invalid -create or replace view v partitioned on (ds, hr) as select * from srcpart; -describe formatted v; -drop view v; \ No newline at end of file +create view vt.v partitioned on (ds, hr) as select * from srcpart_temp; +drop table srcpart_temp; -- vt.v is now invalid +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart; +describe formatted vt.v; +drop view vt.v; + +drop database vt; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/drop_multi_partitions.q ql/src/test/queries/clientpositive/drop_multi_partitions.q index 14e2356..7ee7ae7 100644 --- ql/src/test/queries/clientpositive/drop_multi_partitions.q +++ ql/src/test/queries/clientpositive/drop_multi_partitions.q @@ -1,17 +1,23 @@ -create table mp (a string) partitioned by (b string, c string); +create database dmp; -alter table mp add partition (b='1', c='1'); -alter table mp add partition (b='1', c='2'); -alter table mp add partition (b='2', c='2'); +create table dmp.mp (a string) partitioned by (b string, c string); -show partitions mp; +alter table dmp.mp add partition (b='1', c='1'); +alter table dmp.mp add partition (b='1', c='2'); +alter table dmp.mp add partition (b='2', c='2'); -explain extended alter table mp drop partition (b='1'); -alter table mp drop partition (b='1'); +show partitions dmp.mp; -show partitions mp; +explain extended alter table dmp.mp drop partition (b='1'); +alter table dmp.mp drop partition (b='1'); + +show partitions dmp.mp; set hive.exec.drop.ignorenonexistent=false; -alter table mp drop if exists partition (b='3'); +alter table dmp.mp drop if exists partition (b='3'); + +show partitions dmp.mp; + +drop table dmp.mp; -show partitions mp; +drop database dmp; diff --git ql/src/test/queries/clientpositive/exchange_partition.q ql/src/test/queries/clientpositive/exchange_partition.q index 4be6e3f..96a8c90 100644 --- ql/src/test/queries/clientpositive/exchange_partition.q +++ ql/src/test/queries/clientpositive/exchange_partition.q @@ -1,12 +1,15 @@ -CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING); -CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING); -SHOW PARTITIONS exchange_part_test1; -SHOW PARTITIONS exchange_part_test2; +create database ex1; +create database ex2; -ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05'); -SHOW PARTITIONS exchange_part_test1; -SHOW PARTITIONS exchange_part_test2; +CREATE TABLE ex1.exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING); +CREATE TABLE ex2.exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING); +SHOW PARTITIONS ex1.exchange_part_test1; +SHOW PARTITIONS ex2.exchange_part_test2; -ALTER TABLE exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE exchange_part_test2; -SHOW PARTITIONS exchange_part_test1; -SHOW PARTITIONS exchange_part_test2; +ALTER TABLE ex2.exchange_part_test2 ADD PARTITION (ds='2013-04-05'); +SHOW PARTITIONS ex1.exchange_part_test1; +SHOW PARTITIONS ex2.exchange_part_test2; + +ALTER TABLE ex1.exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE ex2.exchange_part_test2; +SHOW PARTITIONS ex1.exchange_part_test1; +SHOW PARTITIONS ex2.exchange_part_test2; diff --git 
ql/src/test/queries/clientpositive/index_auto_empty.q ql/src/test/queries/clientpositive/index_auto_empty.q index 41f4a40..12c6681 100644 --- ql/src/test/queries/clientpositive/index_auto_empty.q +++ ql/src/test/queries/clientpositive/index_auto_empty.q @@ -1,22 +1,25 @@ -- Test to ensure that an empty index result is propagated correctly +CREATE DATABASE it; -- Create temp, and populate it with some values in src. -CREATE TABLE temp(key STRING, val STRING) STORED AS TEXTFILE; +CREATE TABLE it.temp(key STRING, val STRING) STORED AS TEXTFILE; set hive.stats.dbclass=fs; --- Build an index on temp. -CREATE INDEX temp_index ON TABLE temp(key) as 'COMPACT' WITH DEFERRED REBUILD; -ALTER INDEX temp_index ON temp REBUILD; +-- Build an index on it.temp. +CREATE INDEX temp_index ON TABLE it.temp(key) as 'COMPACT' WITH DEFERRED REBUILD; +ALTER INDEX temp_index ON it.temp REBUILD; SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; SET hive.optimize.index.filter=true; SET hive.optimize.index.filter.compact.minsize=0; -- query should not return any values -SELECT * FROM default__temp_temp_index__ WHERE key = 86; -EXPLAIN SELECT * FROM temp WHERE key = 86; -SELECT * FROM temp WHERE key = 86; +SELECT * FROM it.it__temp_temp_index__ WHERE key = 86; +EXPLAIN SELECT * FROM it.temp WHERE key = 86; +SELECT * FROM it.temp WHERE key = 86; SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat; SET hive.optimize.index.filter=false; -DROP table temp; +DROP table it.temp; + +DROP DATABASE it; diff --git ql/src/test/queries/clientpositive/touch.q ql/src/test/queries/clientpositive/touch.q index 8a661ef..6686b68 100644 --- ql/src/test/queries/clientpositive/touch.q +++ ql/src/test/queries/clientpositive/touch.q @@ -1,17 +1,17 @@ -drop table tstsrc; -drop table tstsrcpart; +create database tc; -create table tstsrc like src; -insert overwrite table tstsrc select key, value from src; +create table tc.tstsrc like default.src; +insert overwrite table tc.tstsrc select key, value from default.src; -create table tstsrcpart like srcpart; -insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12'; +create table tc.tstsrcpart like default.srcpart; +insert overwrite table tc.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12'; +ALTER TABLE tc.tstsrc TOUCH; +ALTER TABLE tc.tstsrcpart TOUCH; +ALTER TABLE tc.tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12'); -ALTER TABLE tstsrc TOUCH; -ALTER TABLE tstsrcpart TOUCH; -ALTER TABLE tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12'); +drop table tc.tstsrc; +drop table tc.tstsrcpart; -drop table tstsrc; -drop table tstsrcpart; +drop database tc; diff --git ql/src/test/queries/clientpositive/unset_table_view_property.q ql/src/test/queries/clientpositive/unset_table_view_property.q index f838cd1..4545230 100644 --- ql/src/test/queries/clientpositive/unset_table_view_property.q +++ ql/src/test/queries/clientpositive/unset_table_view_property.q @@ -1,64 +1,71 @@ -CREATE TABLE testTable(col1 INT, col2 INT); -SHOW TBLPROPERTIES testTable; +CREATE DATABASE vt; + +CREATE TABLE vt.testTable(col1 INT, col2 INT); +SHOW TBLPROPERTIES vt.testTable; -- UNSET TABLE PROPERTIES -ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3'); +SHOW TBLPROPERTIES vt.testTable; -- UNSET all the properties -ALTER TABLE testTable UNSET 
TBLPROPERTIES ('a', 'c'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'c'); +SHOW TBLPROPERTIES vt.testTable; -ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4'); +SHOW TBLPROPERTIES vt.testTable; -- UNSET a subset of the properties -ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'd'); +SHOW TBLPROPERTIES vt.testTable; -- the same property being UNSET multiple times -ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('c', 'c', 'c'); +SHOW TBLPROPERTIES vt.testTable; -ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4'); +SHOW TBLPROPERTIES vt.testTable; -- UNSET a subset of the properties and some non-existed properties using IF EXISTS -ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f'); +SHOW TBLPROPERTIES vt.testTable; -- UNSET a subset of the properties and some non-existed properties using IF EXISTS -ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z'); -SHOW TBLPROPERTIES testTable; +ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z'); +SHOW TBLPROPERTIES vt.testTable; + +DROP TABLE vt.testTable; -- UNSET VIEW PROPERTIES -CREATE VIEW testView AS SELECT value FROM src WHERE key=86; -ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB'='200'); -SHOW TBLPROPERTIES testView; +CREATE VIEW vt.testView AS SELECT value FROM src WHERE key=86; +ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB'='200'); +SHOW TBLPROPERTIES vt.testView; -- UNSET all the properties -ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propB'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propB'); +SHOW TBLPROPERTIES vt.testView; -ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400'); +SHOW TBLPROPERTIES vt.testView; -- UNSET a subset of the properties -ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propC'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propC'); +SHOW TBLPROPERTIES vt.testView; -- the same property being UNSET multiple times -ALTER VIEW testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD'); +SHOW TBLPROPERTIES vt.testView; -ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400'); +SHOW TBLPROPERTIES vt.testView; -- UNSET a subset of the properties and some non-existed properties using IF EXISTS -ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 'propC', 'propZ'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 
'propC', 'propZ'); +SHOW TBLPROPERTIES vt.testView; -- UNSET a subset of the properties and some non-existed properties using IF EXISTS -ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF'); -SHOW TBLPROPERTIES testView; +ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF'); +SHOW TBLPROPERTIES vt.testView; + +DROP VIEW vt.testView; +DROP DATABASE vt; \ No newline at end of file diff --git ql/src/test/results/clientpositive/add_part_exist.q.out ql/src/test/results/clientpositive/add_part_exist.q.out index 4c22d6a..3af2360 100644 --- ql/src/test/results/clientpositive/add_part_exist.q.out +++ ql/src/test/results/clientpositive/add_part_exist.q.out @@ -99,85 +99,92 @@ POSTHOOK: query: -- Test ALTER TABLE ADD PARTITION in non-default Database CREATE DATABASE add_part_test_db POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:add_part_test_db -PREHOOK: query: USE add_part_test_db -PREHOOK: type: SWITCHDATABASE -PREHOOK: Input: database:add_part_test_db -POSTHOOK: query: USE add_part_test_db -POSTHOOK: type: SWITCHDATABASE -POSTHOOK: Input: database:add_part_test_db -PREHOOK: query: SHOW TABLES -PREHOOK: type: SHOWTABLES -POSTHOOK: query: SHOW TABLES -POSTHOOK: type: SHOWTABLES -PREHOOK: query: CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING) +PREHOOK: query: CREATE TABLE add_part_test_db.add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: add_part_test_db@add_part_test +PREHOOK: Output: add_part_test_db@add_part_test_db.add_part_test PREHOOK: Output: database:add_part_test_db -POSTHOOK: query: CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING) +POSTHOOK: query: CREATE TABLE add_part_test_db.add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: add_part_test_db@add_part_test +POSTHOOK: Output: add_part_test_db@add_part_test_db.add_part_test POSTHOOK: Output: database:add_part_test_db -PREHOOK: query: SHOW PARTITIONS add_part_test +PREHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: add_part_test_db@add_part_test -POSTHOOK: query: SHOW PARTITIONS add_part_test +POSTHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Input: add_part_test_db@add_part_test -PREHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01') +PREHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD PARTITION (ds='2010-01-01') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: add_part_test_db@add_part_test -POSTHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01') +POSTHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD PARTITION (ds='2010-01-01') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: add_part_test_db@add_part_test POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01 -PREHOOK: query: SHOW PARTITIONS add_part_test +PREHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: add_part_test_db@add_part_test -POSTHOOK: query: SHOW PARTITIONS add_part_test +POSTHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Input: add_part_test_db@add_part_test ds=2010-01-01 -PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') +PREHOOK: query: ALTER TABLE 
add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: add_part_test_db@add_part_test -POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') +POSTHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: add_part_test_db@add_part_test -PREHOOK: query: SHOW PARTITIONS add_part_test +PREHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: add_part_test_db@add_part_test -POSTHOOK: query: SHOW PARTITIONS add_part_test +POSTHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Input: add_part_test_db@add_part_test ds=2010-01-01 -PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02') +PREHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: add_part_test_db@add_part_test -POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02') +POSTHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: add_part_test_db@add_part_test POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-02 -PREHOOK: query: SHOW PARTITIONS add_part_test +PREHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: add_part_test_db@add_part_test -POSTHOOK: query: SHOW PARTITIONS add_part_test +POSTHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Input: add_part_test_db@add_part_test ds=2010-01-01 ds=2010-01-02 -PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03') +PREHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: add_part_test_db@add_part_test -POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03') +POSTHOOK: query: ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Output: add_part_test_db@add_part_test POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-03 -PREHOOK: query: SHOW PARTITIONS add_part_test +PREHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test PREHOOK: type: SHOWPARTITIONS PREHOOK: Input: add_part_test_db@add_part_test -POSTHOOK: query: SHOW PARTITIONS add_part_test +POSTHOOK: query: SHOW PARTITIONS add_part_test_db.add_part_test POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Input: add_part_test_db@add_part_test ds=2010-01-01 ds=2010-01-02 ds=2010-01-03 +PREHOOK: query: DROP TABLE add_part_test_db.add_part_test +PREHOOK: type: DROPTABLE +PREHOOK: Input: add_part_test_db@add_part_test +PREHOOK: Output: add_part_test_db@add_part_test +POSTHOOK: query: DROP TABLE add_part_test_db.add_part_test +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: add_part_test_db@add_part_test +POSTHOOK: Output: add_part_test_db@add_part_test +PREHOOK: query: DROP DATABASE add_part_test_db 
+PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:add_part_test_db +PREHOOK: Output: database:add_part_test_db +POSTHOOK: query: DROP DATABASE add_part_test_db +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:add_part_test_db +POSTHOOK: Output: database:add_part_test_db diff --git ql/src/test/results/clientpositive/alter1.q.out ql/src/test/results/clientpositive/alter1.q.out index 1cfaf75..e34e27b 100644 --- ql/src/test/results/clientpositive/alter1.q.out +++ ql/src/test/results/clientpositive/alter1.q.out @@ -210,209 +210,198 @@ POSTHOOK: query: -- With non-default Database CREATE DATABASE alter1_db POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:alter1_db -PREHOOK: query: USE alter1_db -PREHOOK: type: SWITCHDATABASE -PREHOOK: Input: database:alter1_db -POSTHOOK: query: USE alter1_db -POSTHOOK: type: SWITCHDATABASE -POSTHOOK: Input: database:alter1_db -PREHOOK: query: SHOW TABLES +PREHOOK: query: SHOW TABLES alter1_db PREHOOK: type: SHOWTABLES -POSTHOOK: query: SHOW TABLES +POSTHOOK: query: SHOW TABLES alter1_db POSTHOOK: type: SHOWTABLES -PREHOOK: query: CREATE TABLE alter1(a INT, b INT) +PREHOOK: query: CREATE TABLE alter1_db.alter1(a INT, b INT) PREHOOK: type: CREATETABLE -PREHOOK: Output: alter1_db@alter1 +PREHOOK: Output: alter1_db@alter1_db.alter1 PREHOOK: Output: database:alter1_db -POSTHOOK: query: CREATE TABLE alter1(a INT, b INT) +POSTHOOK: query: CREATE TABLE alter1_db.alter1(a INT, b INT) POSTHOOK: type: CREATETABLE POSTHOOK: Output: alter1_db@alter1 +POSTHOOK: Output: alter1_db@alter1_db.alter1 POSTHOOK: Output: database:alter1_db -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='3') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='3') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: 
query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE') POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='9') PREHOOK: type: ALTERTABLE_SERDEPROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='9') POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20') PREHOOK: type: ALTERTABLE_SERDEPROPERTIES PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20') POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: 
query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9') +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9') PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9') +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9') POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a string b string #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe' +PREHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe' PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe' +POSTHOOK: query: ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe' POSTHOOK: type: ALTERTABLE_SERIALIZER POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE EXTENDED alter1 +PREHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE EXTENDED alter1 +POSTHOOK: query: DESCRIBE EXTENDED alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a string b string #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string) +PREHOOK: query: ALTER TABLE alter1_db.alter1 REPLACE COLUMNS (a int, b int, c string) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string) +POSTHOOK: query: ALTER TABLE alter1_db.alter1 REPLACE COLUMNS (a int, b int, c string) POSTHOOK: type: ALTERTABLE_REPLACECOLS POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: DESCRIBE alter1 +PREHOOK: query: DESCRIBE alter1_db.alter1 PREHOOK: type: DESCTABLE PREHOOK: Input: alter1_db@alter1 -POSTHOOK: query: DESCRIBE alter1 +POSTHOOK: query: DESCRIBE alter1_db.alter1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: alter1_db@alter1 a int b int c string -PREHOOK: query: DROP TABLE alter1 +PREHOOK: query: DROP TABLE alter1_db.alter1 PREHOOK: type: DROPTABLE PREHOOK: Input: alter1_db@alter1 PREHOOK: Output: alter1_db@alter1 -POSTHOOK: query: DROP TABLE alter1 +POSTHOOK: query: DROP TABLE alter1_db.alter1 POSTHOOK: type: DROPTABLE POSTHOOK: Input: alter1_db@alter1 POSTHOOK: Output: alter1_db@alter1 -PREHOOK: query: USE default -PREHOOK: type: SWITCHDATABASE -PREHOOK: Input: 
database:default -POSTHOOK: query: USE default -POSTHOOK: type: SWITCHDATABASE -POSTHOOK: Input: database:default PREHOOK: query: DROP DATABASE alter1_db PREHOOK: type: DROPDATABASE PREHOOK: Input: database:alter1_db diff --git ql/src/test/results/clientpositive/alter_char1.q.out ql/src/test/results/clientpositive/alter_char1.q.out index 017da60..7111362 100644 --- ql/src/test/results/clientpositive/alter_char1.q.out +++ ql/src/test/results/clientpositive/alter_char1.q.out @@ -1,38 +1,41 @@ PREHOOK: query: -- SORT_QUERY_RESULTS -drop table alter_char_1 -PREHOOK: type: DROPTABLE +create database ac +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:ac POSTHOOK: query: -- SORT_QUERY_RESULTS -drop table alter_char_1 -POSTHOOK: type: DROPTABLE -PREHOOK: query: create table alter_char_1 (key string, value string) +create database ac +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:ac +PREHOOK: query: create table ac.alter_char_1 (key string, value string) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@alter_char_1 -POSTHOOK: query: create table alter_char_1 (key string, value string) +PREHOOK: Output: ac@ac.alter_char_1 +PREHOOK: Output: database:ac +POSTHOOK: query: create table ac.alter_char_1 (key string, value string) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@alter_char_1 -PREHOOK: query: insert overwrite table alter_char_1 +POSTHOOK: Output: ac@ac.alter_char_1 +POSTHOOK: Output: ac@alter_char_1 +POSTHOOK: Output: database:ac +PREHOOK: query: insert overwrite table ac.alter_char_1 select key, value from src order by key limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alter_char_1 -POSTHOOK: query: insert overwrite table alter_char_1 +PREHOOK: Output: ac@alter_char_1 +POSTHOOK: query: insert overwrite table ac.alter_char_1 select key, value from src order by key limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 POSTHOOK: Lineage: alter_char_1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_char_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: select * from alter_char_1 +PREHOOK: query: select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_char_1 +POSTHOOK: query: select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -40,24 +43,24 @@ POSTHOOK: Input: default@alter_char_1 10 val_10 100 val_100 PREHOOK: query: -- change column to char -alter table alter_char_1 change column value value char(20) +alter table ac.alter_char_1 change column value value char(20) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_char_1 -PREHOOK: Output: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 +PREHOOK: Output: ac@alter_char_1 POSTHOOK: query: -- change column to char -alter table alter_char_1 change column value value char(20) +alter table ac.alter_char_1 change column value value char(20) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_char_1 -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 PREHOOK: query: -- contents 
should still look the same -select * from alter_char_1 +select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### POSTHOOK: query: -- contents should still look the same -select * from alter_char_1 +select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -65,24 +68,24 @@ POSTHOOK: Input: default@alter_char_1 10 val_10 100 val_100 PREHOOK: query: -- change column to smaller char -alter table alter_char_1 change column value value char(3) +alter table ac.alter_char_1 change column value value char(3) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_char_1 -PREHOOK: Output: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 +PREHOOK: Output: ac@alter_char_1 POSTHOOK: query: -- change column to smaller char -alter table alter_char_1 change column value value char(3) +alter table ac.alter_char_1 change column value value char(3) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_char_1 -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 PREHOOK: query: -- value column should be truncated now -select * from alter_char_1 +select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### POSTHOOK: query: -- value column should be truncated now -select * from alter_char_1 +select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val 0 val @@ -90,24 +93,24 @@ POSTHOOK: Input: default@alter_char_1 10 val 100 val PREHOOK: query: -- change back to bigger char -alter table alter_char_1 change column value value char(20) +alter table ac.alter_char_1 change column value value char(20) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_char_1 -PREHOOK: Output: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 +PREHOOK: Output: ac@alter_char_1 POSTHOOK: query: -- change back to bigger char -alter table alter_char_1 change column value value char(20) +alter table ac.alter_char_1 change column value value char(20) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_char_1 -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 PREHOOK: query: -- column values should be full size again -select * from alter_char_1 +select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### POSTHOOK: query: -- column values should be full size again -select * from alter_char_1 +select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -115,60 +118,68 @@ POSTHOOK: Input: default@alter_char_1 10 val_10 100 val_100 PREHOOK: query: -- add char column -alter table alter_char_1 add columns (key2 int, value2 char(10)) +alter table ac.alter_char_1 add columns (key2 int, value2 char(10)) PREHOOK: type: ALTERTABLE_ADDCOLS -PREHOOK: Input: default@alter_char_1 -PREHOOK: Output: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 +PREHOOK: Output: ac@alter_char_1 POSTHOOK: query: -- add char column -alter table alter_char_1 
add columns (key2 int, value2 char(10)) +alter table ac.alter_char_1 add columns (key2 int, value2 char(10)) POSTHOOK: type: ALTERTABLE_ADDCOLS -POSTHOOK: Input: default@alter_char_1 -POSTHOOK: Output: default@alter_char_1 -PREHOOK: query: select * from alter_char_1 +POSTHOOK: Input: ac@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 +PREHOOK: query: select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_char_1 +POSTHOOK: query: select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val_0 NULL NULL 0 val_0 NULL NULL 0 val_0 NULL NULL 10 val_10 NULL NULL 100 val_100 NULL NULL -PREHOOK: query: insert overwrite table alter_char_1 +PREHOOK: query: insert overwrite table ac.alter_char_1 select key, value, key, value from src order by key limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alter_char_1 -POSTHOOK: query: insert overwrite table alter_char_1 +PREHOOK: Output: ac@alter_char_1 +POSTHOOK: query: insert overwrite table ac.alter_char_1 select key, value, key, value from src order by key limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 POSTHOOK: Lineage: alter_char_1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_char_1.key2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_char_1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: alter_char_1.value2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: select * from alter_char_1 +PREHOOK: query: select * from ac.alter_char_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_char_1 +PREHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_char_1 +POSTHOOK: query: select * from ac.alter_char_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 #### A masked pattern was here #### 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 10 val_10 10 val_10 100 val_100 100 val_100 -PREHOOK: query: drop table alter_char_1 +PREHOOK: query: drop table ac.alter_char_1 PREHOOK: type: DROPTABLE -PREHOOK: Input: default@alter_char_1 -PREHOOK: Output: default@alter_char_1 -POSTHOOK: query: drop table alter_char_1 +PREHOOK: Input: ac@alter_char_1 +PREHOOK: Output: ac@alter_char_1 +POSTHOOK: query: drop table ac.alter_char_1 POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@alter_char_1 -POSTHOOK: Output: default@alter_char_1 +POSTHOOK: Input: ac@alter_char_1 +POSTHOOK: Output: ac@alter_char_1 +PREHOOK: query: drop database ac +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:ac +PREHOOK: Output: database:ac +POSTHOOK: query: drop database ac +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:ac +POSTHOOK: Output: database:ac diff --git ql/src/test/results/clientpositive/alter_index.q.out ql/src/test/results/clientpositive/alter_index.q.out index 2093e2f..c69127a 100644 --- ql/src/test/results/clientpositive/alter_index.q.out +++ ql/src/test/results/clientpositive/alter_index.q.out @@ -4,10 +4,10 @@ PREHOOK: Input: default@src POSTHOOK: query: drop index src_index_8 on src POSTHOOK: type: DROPINDEX 
POSTHOOK: Input: default@src -PREHOOK: query: create index src_index_8 on table src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2") +PREHOOK: query: create index src_index_8 on table default.src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2") PREHOOK: type: CREATEINDEX PREHOOK: Input: default@src -POSTHOOK: query: create index src_index_8 on table src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2") +POSTHOOK: query: create index src_index_8 on table default.src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2") POSTHOOK: type: CREATEINDEX POSTHOOK: Input: default@src POSTHOOK: Output: default@default__src_src_index_8__ @@ -22,9 +22,9 @@ _bucketname string _offsets array #### A masked pattern was here #### -PREHOOK: query: alter index src_index_8 on src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3") +PREHOOK: query: alter index src_index_8 on default.src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3") PREHOOK: type: ALTERINDEX_PROPS -POSTHOOK: query: alter index src_index_8 on src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3") +POSTHOOK: query: alter index src_index_8 on default.src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3") POSTHOOK: type: ALTERINDEX_PROPS PREHOOK: query: desc extended default__src_src_index_8__ PREHOOK: type: DESCTABLE @@ -37,10 +37,10 @@ _bucketname string _offsets array #### A masked pattern was here #### -PREHOOK: query: drop index src_index_8 on src +PREHOOK: query: drop index src_index_8 on default.src PREHOOK: type: DROPINDEX PREHOOK: Input: default@src -POSTHOOK: query: drop index src_index_8 on src +POSTHOOK: query: drop index src_index_8 on default.src POSTHOOK: type: DROPINDEX POSTHOOK: Input: default@src PREHOOK: query: show tables diff --git ql/src/test/results/clientpositive/alter_partition_coltype.q.out ql/src/test/results/clientpositive/alter_partition_coltype.q.out index 25eb48c..aab29df 100644 --- ql/src/test/results/clientpositive/alter_partition_coltype.q.out +++ ql/src/test/results/clientpositive/alter_partition_coltype.q.out @@ -939,51 +939,58 @@ POSTHOOK: query: drop table alter_coltype POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@alter_coltype POSTHOOK: Output: default@alter_coltype -PREHOOK: query: create table alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string) +PREHOOK: query: create database pt +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:pt +POSTHOOK: query: create database pt +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:pt +PREHOOK: query: create table pt.alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@alterdynamic_part_table -POSTHOOK: query: create table alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string) +PREHOOK: Output: database:pt +PREHOOK: Output: pt@pt.alterdynamic_part_table +POSTHOOK: query: create table pt.alterdynamic_part_table(intcol string) partitioned by (partcol1 string, partcol2 string) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@alterdynamic_part_table -PREHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5 +POSTHOOK: Output: database:pt +POSTHOOK: Output: 
pt@alterdynamic_part_table +POSTHOOK: Output: pt@pt.alterdynamic_part_table +PREHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alterdynamic_part_table -POSTHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5 +PREHOOK: Output: pt@alterdynamic_part_table +POSTHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '1', '1' from src where key=150 limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alterdynamic_part_table@partcol1=1/partcol2=1 +POSTHOOK: Output: pt@alterdynamic_part_table@partcol1=1/partcol2=1 POSTHOOK: Lineage: alterdynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE [] -PREHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5 +PREHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alterdynamic_part_table -POSTHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5 +PREHOOK: Output: pt@alterdynamic_part_table +POSTHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select '1', '2', '1' from src where key=150 limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alterdynamic_part_table@partcol1=2/partcol2=1 +POSTHOOK: Output: pt@alterdynamic_part_table@partcol1=2/partcol2=1 POSTHOOK: Lineage: alterdynamic_part_table PARTITION(partcol1=2,partcol2=1).intcol SIMPLE [] -PREHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5 +PREHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alterdynamic_part_table -POSTHOOK: query: insert into table alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5 +PREHOOK: Output: pt@alterdynamic_part_table +POSTHOOK: query: insert into table pt.alterdynamic_part_table partition(partcol1, partcol2) select NULL, '1', '1' from src where key=150 limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alterdynamic_part_table@partcol1=1/partcol2=1 +POSTHOOK: Output: pt@alterdynamic_part_table@partcol1=1/partcol2=1 POSTHOOK: Lineage: alterdynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE [] -PREHOOK: query: alter table alterdynamic_part_table partition column (partcol1 int) +PREHOOK: query: alter table pt.alterdynamic_part_table partition column (partcol1 int) PREHOOK: type: ALTERTABLE_PARTCOLTYPE -PREHOOK: Input: default@alterdynamic_part_table -POSTHOOK: query: alter table alterdynamic_part_table partition column (partcol1 int) +PREHOOK: Input: pt@alterdynamic_part_table +POSTHOOK: query: alter table pt.alterdynamic_part_table partition column (partcol1 int) POSTHOOK: type: ALTERTABLE_PARTCOLTYPE -POSTHOOK: Input: default@alterdynamic_part_table -POSTHOOK: Output: 
default@alterdynamic_part_table -PREHOOK: query: explain extended select intcol from alterdynamic_part_table where partcol1='1' and partcol2='1' +POSTHOOK: Input: pt@alterdynamic_part_table +POSTHOOK: Output: pt@alterdynamic_part_table +PREHOOK: query: explain extended select intcol from pt.alterdynamic_part_table where partcol1='1' and partcol2='1' PREHOOK: type: QUERY -POSTHOOK: query: explain extended select intcol from alterdynamic_part_table where partcol1='1' and partcol2='1' +POSTHOOK: query: explain extended select intcol from pt.alterdynamic_part_table where partcol1='1' and partcol2='1' POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: @@ -991,6 +998,7 @@ TOK_QUERY TOK_FROM TOK_TABREF TOK_TABNAME + pt alterdynamic_part_table TOK_INSERT TOK_DESTINATION @@ -1067,7 +1075,7 @@ STAGE PLANS: columns.comments columns.types string #### A masked pattern was here #### - name default.alterdynamic_part_table + name pt.alterdynamic_part_table numFiles 2 numRows 1 partition_columns partcol1/partcol2 @@ -1088,7 +1096,7 @@ STAGE PLANS: columns.comments columns.types string #### A masked pattern was here #### - name default.alterdynamic_part_table + name pt.alterdynamic_part_table partition_columns partcol1/partcol2 partition_columns.types int:string serialization.ddl struct alterdynamic_part_table { string intcol} @@ -1096,10 +1104,10 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.alterdynamic_part_table - name: default.alterdynamic_part_table + name: pt.alterdynamic_part_table + name: pt.alterdynamic_part_table Truncated Path -> Alias: - /alterdynamic_part_table/partcol1=1/partcol2=1 [alterdynamic_part_table] + /pt.db/alterdynamic_part_table/partcol1=1/partcol2=1 [alterdynamic_part_table] Stage: Stage-0 Fetch Operator @@ -1107,9 +1115,9 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: explain extended select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') +PREHOOK: query: explain extended select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') PREHOOK: type: QUERY -POSTHOOK: query: explain extended select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') +POSTHOOK: query: explain extended select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') POSTHOOK: type: QUERY ABSTRACT SYNTAX TREE: @@ -1117,6 +1125,7 @@ TOK_QUERY TOK_FROM TOK_TABREF TOK_TABNAME + pt alterdynamic_part_table TOK_INSERT TOK_DESTINATION @@ -1203,7 +1212,7 @@ STAGE PLANS: columns.comments columns.types string #### A masked pattern was here #### - name default.alterdynamic_part_table + name pt.alterdynamic_part_table numFiles 1 numRows 1 partition_columns partcol1/partcol2 @@ -1224,7 +1233,7 @@ STAGE PLANS: columns.comments columns.types string #### A masked pattern was here #### - name default.alterdynamic_part_table + name pt.alterdynamic_part_table partition_columns partcol1/partcol2 partition_columns.types int:string serialization.ddl struct alterdynamic_part_table { string intcol} @@ -1232,10 +1241,10 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.alterdynamic_part_table - name: default.alterdynamic_part_table + name: pt.alterdynamic_part_table + name: pt.alterdynamic_part_table Truncated Path -> Alias: - /alterdynamic_part_table/partcol1=2/partcol2=1 [alterdynamic_part_table] + /pt.db/alterdynamic_part_table/partcol1=2/partcol2=1 [alterdynamic_part_table] Stage: Stage-0 Fetch Operator @@ -1243,14 +1252,30 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') +PREHOOK: query: select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') PREHOOK: type: QUERY -PREHOOK: Input: default@alterdynamic_part_table -PREHOOK: Input: default@alterdynamic_part_table@partcol1=2/partcol2=1 +PREHOOK: Input: pt@alterdynamic_part_table +PREHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=1 #### A masked pattern was here #### -POSTHOOK: query: select intcol from alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') +POSTHOOK: query: select intcol from pt.alterdynamic_part_table where (partcol1='2' and partcol2='1')or (partcol1='1' and partcol2='__HIVE_DEFAULT_PARTITION__') POSTHOOK: type: QUERY -POSTHOOK: Input: default@alterdynamic_part_table -POSTHOOK: Input: default@alterdynamic_part_table@partcol1=2/partcol2=1 +POSTHOOK: Input: pt@alterdynamic_part_table +POSTHOOK: Input: pt@alterdynamic_part_table@partcol1=2/partcol2=1 #### A masked pattern was here #### 1 +PREHOOK: query: drop table pt.alterdynamic_part_table +PREHOOK: type: DROPTABLE +PREHOOK: Input: pt@alterdynamic_part_table +PREHOOK: Output: pt@alterdynamic_part_table +POSTHOOK: query: drop table pt.alterdynamic_part_table +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: pt@alterdynamic_part_table +POSTHOOK: Output: pt@alterdynamic_part_table +PREHOOK: query: drop database pt +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:pt +PREHOOK: Output: database:pt +POSTHOOK: query: drop database pt +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:pt +POSTHOOK: Output: database:pt diff --git ql/src/test/results/clientpositive/alter_skewed_table.q.out ql/src/test/results/clientpositive/alter_skewed_table.q.out index e6bfc5a..3e09f78 100644 --- ql/src/test/results/clientpositive/alter_skewed_table.q.out +++ ql/src/test/results/clientpositive/alter_skewed_table.q.out @@ -92,27 +92,34 @@ POSTHOOK: query: drop table original POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@original POSTHOOK: Output: default@original -PREHOOK: query: create table original2 (key STRING, value STRING) +PREHOOK: query: create database skew_test +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:skew_test +POSTHOOK: query: create database skew_test +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:skew_test +PREHOOK: query: create table skew_test.original2 (key STRING, value STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@original2 -POSTHOOK: query: create table original2 (key STRING, value STRING) +PREHOOK: Output: database:skew_test +PREHOOK: Output: skew_test@skew_test.original2 +POSTHOOK: query: create table skew_test.original2 (key STRING, value STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@original2 -PREHOOK: query: describe 
formatted original2 +POSTHOOK: Output: database:skew_test +POSTHOOK: Output: skew_test@original2 +POSTHOOK: Output: skew_test@skew_test.original2 +PREHOOK: query: describe formatted skew_test.original2 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@original2 -POSTHOOK: query: describe formatted original2 +PREHOOK: Input: skew_test@original2 +POSTHOOK: query: describe formatted skew_test.original2 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@original2 +POSTHOOK: Input: skew_test@original2 # col_name data_type comment key string value string # Detailed Table Information -Database: default +Database: skew_test #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -131,27 +138,27 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: alter table original2 SKEWED BY (key, value) ON ((1,1),(5,6)) +PREHOOK: query: alter table skew_test.original2 SKEWED BY (key, value) ON ((1,1),(5,6)) PREHOOK: type: ALTERTABLE_SKEWED -PREHOOK: Input: default@original2 -PREHOOK: Output: default@original2 -POSTHOOK: query: alter table original2 SKEWED BY (key, value) ON ((1,1),(5,6)) +PREHOOK: Input: skew_test@original2 +PREHOOK: Output: skew_test@original2 +POSTHOOK: query: alter table skew_test.original2 SKEWED BY (key, value) ON ((1,1),(5,6)) POSTHOOK: type: ALTERTABLE_SKEWED -POSTHOOK: Input: default@original2 -POSTHOOK: Output: default@original2 -PREHOOK: query: describe formatted original2 +POSTHOOK: Input: skew_test@original2 +POSTHOOK: Output: skew_test@original2 +PREHOOK: query: describe formatted skew_test.original2 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@original2 -POSTHOOK: query: describe formatted original2 +PREHOOK: Input: skew_test@original2 +POSTHOOK: query: describe formatted skew_test.original2 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@original2 +POSTHOOK: Input: skew_test@original2 # col_name data_type comment key string value string # Detailed Table Information -Database: default +Database: skew_test #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -178,35 +185,36 @@ Skewed Columns: [key, value] Skewed Values: [[1, 1], [5, 6]] Storage Desc Params: serialization.format 1 -PREHOOK: query: drop table original2 +PREHOOK: query: drop table skew_test.original2 PREHOOK: type: DROPTABLE -PREHOOK: Input: default@original2 -PREHOOK: Output: default@original2 -POSTHOOK: query: drop table original2 +PREHOOK: Input: skew_test@original2 +PREHOOK: Output: skew_test@original2 +POSTHOOK: query: drop table skew_test.original2 POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@original2 -POSTHOOK: Output: default@original2 -PREHOOK: query: create table original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)) +POSTHOOK: Input: skew_test@original2 +POSTHOOK: Output: skew_test@original2 +PREHOOK: query: create table skew_test.original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@original3 -POSTHOOK: query: create table original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)) +PREHOOK: Output: database:skew_test +PREHOOK: Output: skew_test@skew_test.original3 +POSTHOOK: query: create table skew_test.original3 (key STRING, value STRING) SKEWED BY (key, value) ON ((1,1),(5,6)) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@original3 -PREHOOK: query: describe formatted original3 +POSTHOOK: Output: database:skew_test 
+POSTHOOK: Output: skew_test@original3 +POSTHOOK: Output: skew_test@skew_test.original3 +PREHOOK: query: describe formatted skew_test.original3 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@original3 -POSTHOOK: query: describe formatted original3 +PREHOOK: Input: skew_test@original3 +POSTHOOK: query: describe formatted skew_test.original3 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@original3 +POSTHOOK: Input: skew_test@original3 # col_name data_type comment key string value string # Detailed Table Information -Database: default +Database: skew_test #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -227,27 +235,27 @@ Skewed Columns: [key, value] Skewed Values: [[1, 1], [5, 6]] Storage Desc Params: serialization.format 1 -PREHOOK: query: alter table original3 not skewed +PREHOOK: query: alter table skew_test.original3 not skewed PREHOOK: type: ALTERTABLE_SKEWED -PREHOOK: Input: default@original3 -PREHOOK: Output: default@original3 -POSTHOOK: query: alter table original3 not skewed +PREHOOK: Input: skew_test@original3 +PREHOOK: Output: skew_test@original3 +POSTHOOK: query: alter table skew_test.original3 not skewed POSTHOOK: type: ALTERTABLE_SKEWED -POSTHOOK: Input: default@original3 -POSTHOOK: Output: default@original3 -PREHOOK: query: describe formatted original3 +POSTHOOK: Input: skew_test@original3 +POSTHOOK: Output: skew_test@original3 +PREHOOK: query: describe formatted skew_test.original3 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@original3 -POSTHOOK: query: describe formatted original3 +PREHOOK: Input: skew_test@original3 +POSTHOOK: query: describe formatted skew_test.original3 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@original3 +POSTHOOK: Input: skew_test@original3 # col_name data_type comment key string value string # Detailed Table Information -Database: default +Database: skew_test #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -272,11 +280,19 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 -PREHOOK: query: drop table original3 +PREHOOK: query: drop table skew_test.original3 PREHOOK: type: DROPTABLE -PREHOOK: Input: default@original3 -PREHOOK: Output: default@original3 -POSTHOOK: query: drop table original3 +PREHOOK: Input: skew_test@original3 +PREHOOK: Output: skew_test@original3 +POSTHOOK: query: drop table skew_test.original3 POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@original3 -POSTHOOK: Output: default@original3 +POSTHOOK: Input: skew_test@original3 +POSTHOOK: Output: skew_test@original3 +PREHOOK: query: drop database skew_test +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:skew_test +PREHOOK: Output: database:skew_test +POSTHOOK: query: drop database skew_test +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:skew_test +POSTHOOK: Output: database:skew_test diff --git ql/src/test/results/clientpositive/alter_varchar1.q.out ql/src/test/results/clientpositive/alter_varchar1.q.out index e74a7ed..558d392 100644 --- ql/src/test/results/clientpositive/alter_varchar1.q.out +++ ql/src/test/results/clientpositive/alter_varchar1.q.out @@ -1,38 +1,41 @@ PREHOOK: query: -- SORT_QUERY_RESULTS -drop table alter_varchar_1 -PREHOOK: type: DROPTABLE +create database avc +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:avc POSTHOOK: query: -- SORT_QUERY_RESULTS -drop table alter_varchar_1 -POSTHOOK: type: DROPTABLE -PREHOOK: query: create table alter_varchar_1 (key string, value string) +create database avc +POSTHOOK: type: CREATEDATABASE +POSTHOOK: 
Output: database:avc +PREHOOK: query: create table avc.alter_varchar_1 (key string, value string) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@alter_varchar_1 -POSTHOOK: query: create table alter_varchar_1 (key string, value string) +PREHOOK: Output: avc@avc.alter_varchar_1 +PREHOOK: Output: database:avc +POSTHOOK: query: create table avc.alter_varchar_1 (key string, value string) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@alter_varchar_1 -PREHOOK: query: insert overwrite table alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 +POSTHOOK: Output: avc@avc.alter_varchar_1 +POSTHOOK: Output: database:avc +PREHOOK: query: insert overwrite table avc.alter_varchar_1 select key, value from src order by key limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alter_varchar_1 -POSTHOOK: query: insert overwrite table alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 +POSTHOOK: query: insert overwrite table avc.alter_varchar_1 select key, value from src order by key limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 POSTHOOK: Lineage: alter_varchar_1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_varchar_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: select * from alter_varchar_1 +PREHOOK: query: select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_varchar_1 +POSTHOOK: query: select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -40,24 +43,24 @@ POSTHOOK: Input: default@alter_varchar_1 10 val_10 100 val_100 PREHOOK: query: -- change column to varchar -alter table alter_varchar_1 change column value value varchar(20) +alter table avc.alter_varchar_1 change column value value varchar(20) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_varchar_1 -PREHOOK: Output: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 POSTHOOK: query: -- change column to varchar -alter table alter_varchar_1 change column value value varchar(20) +alter table avc.alter_varchar_1 change column value value varchar(20) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_varchar_1 -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 PREHOOK: query: -- contents should still look the same -select * from alter_varchar_1 +select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### POSTHOOK: query: -- contents should still look the same -select * from alter_varchar_1 +select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -65,24 +68,24 @@ POSTHOOK: Input: default@alter_varchar_1 10 val_10 100 val_100 PREHOOK: query: -- change column to smaller varchar -alter table alter_varchar_1 change column value value varchar(3) +alter table 
avc.alter_varchar_1 change column value value varchar(3) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_varchar_1 -PREHOOK: Output: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 POSTHOOK: query: -- change column to smaller varchar -alter table alter_varchar_1 change column value value varchar(3) +alter table avc.alter_varchar_1 change column value value varchar(3) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_varchar_1 -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 PREHOOK: query: -- value column should be truncated now -select * from alter_varchar_1 +select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### POSTHOOK: query: -- value column should be truncated now -select * from alter_varchar_1 +select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val 0 val @@ -90,24 +93,24 @@ POSTHOOK: Input: default@alter_varchar_1 10 val 100 val PREHOOK: query: -- change back to bigger varchar -alter table alter_varchar_1 change column value value varchar(20) +alter table avc.alter_varchar_1 change column value value varchar(20) PREHOOK: type: ALTERTABLE_RENAMECOL -PREHOOK: Input: default@alter_varchar_1 -PREHOOK: Output: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 POSTHOOK: query: -- change back to bigger varchar -alter table alter_varchar_1 change column value value varchar(20) +alter table avc.alter_varchar_1 change column value value varchar(20) POSTHOOK: type: ALTERTABLE_RENAMECOL -POSTHOOK: Input: default@alter_varchar_1 -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 PREHOOK: query: -- column values should be full size again -select * from alter_varchar_1 +select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### POSTHOOK: query: -- column values should be full size again -select * from alter_varchar_1 +select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val_0 0 val_0 @@ -115,60 +118,68 @@ POSTHOOK: Input: default@alter_varchar_1 10 val_10 100 val_100 PREHOOK: query: -- add varchar column -alter table alter_varchar_1 add columns (key2 int, value2 varchar(10)) +alter table avc.alter_varchar_1 add columns (key2 int, value2 varchar(10)) PREHOOK: type: ALTERTABLE_ADDCOLS -PREHOOK: Input: default@alter_varchar_1 -PREHOOK: Output: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 POSTHOOK: query: -- add varchar column -alter table alter_varchar_1 add columns (key2 int, value2 varchar(10)) +alter table avc.alter_varchar_1 add columns (key2 int, value2 varchar(10)) POSTHOOK: type: ALTERTABLE_ADDCOLS -POSTHOOK: Input: default@alter_varchar_1 -POSTHOOK: Output: default@alter_varchar_1 -PREHOOK: query: select * from alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 +PREHOOK: query: select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: 
default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_varchar_1 +POSTHOOK: query: select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val_0 NULL NULL 0 val_0 NULL NULL 0 val_0 NULL NULL 10 val_10 NULL NULL 100 val_100 NULL NULL -PREHOOK: query: insert overwrite table alter_varchar_1 +PREHOOK: query: insert overwrite table avc.alter_varchar_1 select key, value, key, value from src order by key limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@alter_varchar_1 -POSTHOOK: query: insert overwrite table alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 +POSTHOOK: query: insert overwrite table avc.alter_varchar_1 select key, value, key, value from src order by key limit 5 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 POSTHOOK: Lineage: alter_varchar_1.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_varchar_1.key2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: alter_varchar_1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: alter_varchar_1.value2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: select * from alter_varchar_1 +PREHOOK: query: select * from avc.alter_varchar_1 PREHOOK: type: QUERY -PREHOOK: Input: default@alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### -POSTHOOK: query: select * from alter_varchar_1 +POSTHOOK: query: select * from avc.alter_varchar_1 POSTHOOK: type: QUERY -POSTHOOK: Input: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 #### A masked pattern was here #### 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 10 val_10 10 val_10 100 val_100 100 val_100 -PREHOOK: query: drop table alter_varchar_1 +PREHOOK: query: drop table avc.alter_varchar_1 PREHOOK: type: DROPTABLE -PREHOOK: Input: default@alter_varchar_1 -PREHOOK: Output: default@alter_varchar_1 -POSTHOOK: query: drop table alter_varchar_1 +PREHOOK: Input: avc@alter_varchar_1 +PREHOOK: Output: avc@alter_varchar_1 +POSTHOOK: query: drop table avc.alter_varchar_1 POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@alter_varchar_1 -POSTHOOK: Output: default@alter_varchar_1 +POSTHOOK: Input: avc@alter_varchar_1 +POSTHOOK: Output: avc@alter_varchar_1 +PREHOOK: query: drop database avc +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:avc +PREHOOK: Output: database:avc +POSTHOOK: query: drop database avc +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:avc +POSTHOOK: Output: database:avc diff --git ql/src/test/results/clientpositive/alter_view_as_select.q.out ql/src/test/results/clientpositive/alter_view_as_select.q.out index 53a6b37..b3d42cd 100644 --- ql/src/test/results/clientpositive/alter_view_as_select.q.out +++ ql/src/test/results/clientpositive/alter_view_as_select.q.out @@ -1,20 +1,22 @@ -PREHOOK: query: DROP VIEW testView -PREHOOK: type: DROPVIEW -POSTHOOK: query: DROP VIEW testView -POSTHOOK: type: DROPVIEW -PREHOOK: query: CREATE VIEW testView as SELECT * FROM srcpart +PREHOOK: query: CREATE DATABASE tv +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:tv +POSTHOOK: query: CREATE DATABASE tv +POSTHOOK: type: CREATEDATABASE 
+POSTHOOK: Output: database:tv +PREHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart -POSTHOOK: query: CREATE VIEW testView as SELECT * FROM srcpart +POSTHOOK: query: CREATE VIEW tv.testView as SELECT * FROM srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@testView -PREHOOK: query: DESCRIBE FORMATTED testView +POSTHOOK: Output: tv@testView +PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE -PREHOOK: Input: default@testview -POSTHOOK: query: DESCRIBE FORMATTED testView +PREHOOK: Input: tv@testview +POSTHOOK: query: DESCRIBE FORMATTED tv.testView POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@testview +POSTHOOK: Input: tv@testview # col_name data_type comment key string @@ -23,7 +25,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: tv #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -43,25 +45,25 @@ Sort Columns: [] # View Information View Original Text: SELECT * FROM srcpart View Expanded Text: SELECT `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` FROM `default`.`srcpart` -PREHOOK: query: ALTER VIEW testView AS SELECT value FROM src WHERE key=86 +PREHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src -POSTHOOK: query: ALTER VIEW testView AS SELECT value FROM src WHERE key=86 +POSTHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src -POSTHOOK: Output: default@testview -PREHOOK: query: DESCRIBE FORMATTED testView +POSTHOOK: Output: tv@testview +PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE -PREHOOK: Input: default@testview -POSTHOOK: query: DESCRIBE FORMATTED testView +PREHOOK: Input: tv@testview +POSTHOOK: query: DESCRIBE FORMATTED tv.testView POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@testview +POSTHOOK: Input: tv@testview # col_name data_type comment value string # Detailed Table Information -Database: default +Database: tv #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -81,34 +83,34 @@ Sort Columns: [] # View Information View Original Text: SELECT value FROM src WHERE key=86 View Expanded Text: SELECT `src`.`value` FROM `default`.`src` WHERE `src`.`key`=86 -PREHOOK: query: ALTER VIEW testView AS +PREHOOK: query: ALTER VIEW tv.testView AS SELECT * FROM src WHERE key > 80 AND key < 100 ORDER BY key, value LIMIT 10 PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src -POSTHOOK: query: ALTER VIEW testView AS +POSTHOOK: query: ALTER VIEW tv.testView AS SELECT * FROM src WHERE key > 80 AND key < 100 ORDER BY key, value LIMIT 10 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@src -POSTHOOK: Output: default@testview -PREHOOK: query: DESCRIBE FORMATTED testView +POSTHOOK: Output: tv@testview +PREHOOK: query: DESCRIBE FORMATTED tv.testView PREHOOK: type: DESCTABLE -PREHOOK: Input: default@testview -POSTHOOK: query: DESCRIBE FORMATTED testView +PREHOOK: Input: tv@testview +POSTHOOK: query: DESCRIBE FORMATTED tv.testView POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@testview +POSTHOOK: Input: tv@testview # col_name data_type comment key string value string # Detailed Table Information -Database: default +Database: tv #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -134,3 +136,19 @@ View Expanded Text: SELECT `src`.`key`, `src`.`value` FROM 
`default`.`src` WHERE `src`.`key` > 80 AND `src`.`key` < 100 ORDER BY `src`.`key`, `src`.`value` LIMIT 10 +PREHOOK: query: DROP VIEW tv.testView +PREHOOK: type: DROPVIEW +PREHOOK: Input: tv@testview +PREHOOK: Output: tv@testview +POSTHOOK: query: DROP VIEW tv.testView +POSTHOOK: type: DROPVIEW +POSTHOOK: Input: tv@testview +POSTHOOK: Output: tv@testview +PREHOOK: query: DROP DATABASE tv +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:tv +PREHOOK: Output: database:tv +POSTHOOK: query: DROP DATABASE tv +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:tv +POSTHOOK: Output: database:tv diff --git ql/src/test/results/clientpositive/alter_view_rename.q.out ql/src/test/results/clientpositive/alter_view_rename.q.out index 0f3dd14..43c4422 100644 --- ql/src/test/results/clientpositive/alter_view_rename.q.out +++ ql/src/test/results/clientpositive/alter_view_rename.q.out @@ -1,3 +1,15 @@ +PREHOOK: query: CREATE DATABASE tv1 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:tv1 +POSTHOOK: query: CREATE DATABASE tv1 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:tv1 +PREHOOK: query: CREATE DATABASE tv2 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:tv2 +POSTHOOK: query: CREATE DATABASE tv2 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:tv2 PREHOOK: query: CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -6,53 +18,53 @@ POSTHOOK: query: CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds S POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@invites -PREHOOK: query: CREATE VIEW view1 as SELECT * FROM invites +PREHOOK: query: CREATE VIEW tv1.view1 as SELECT * FROM invites PREHOOK: type: CREATEVIEW PREHOOK: Input: default@invites -POSTHOOK: query: CREATE VIEW view1 as SELECT * FROM invites +POSTHOOK: query: CREATE VIEW tv1.view1 as SELECT * FROM invites POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@invites -POSTHOOK: Output: default@view1 -PREHOOK: query: DESCRIBE EXTENDED view1 +POSTHOOK: Output: tv1@view1 +PREHOOK: query: DESCRIBE EXTENDED tv1.view1 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@view1 -POSTHOOK: query: DESCRIBE EXTENDED view1 +PREHOOK: Input: tv1@view1 +POSTHOOK: query: DESCRIBE EXTENDED tv1.view1 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@view1 +POSTHOOK: Input: tv1@view1 foo int bar string ds string #### A masked pattern was here #### -PREHOOK: query: ALTER VIEW view1 RENAME TO view2 +PREHOOK: query: ALTER VIEW tv1.view1 RENAME TO tv2.view2 PREHOOK: type: ALTERVIEW_RENAME -PREHOOK: Input: default@view1 -PREHOOK: Output: default@view1 -POSTHOOK: query: ALTER VIEW view1 RENAME TO view2 +PREHOOK: Input: tv1@view1 +PREHOOK: Output: tv1@view1 +POSTHOOK: query: ALTER VIEW tv1.view1 RENAME TO tv2.view2 POSTHOOK: type: ALTERVIEW_RENAME -POSTHOOK: Input: default@view1 -POSTHOOK: Output: default@view1 -POSTHOOK: Output: default@view2 -PREHOOK: query: DESCRIBE EXTENDED view2 +POSTHOOK: Input: tv1@view1 +POSTHOOK: Output: tv1@view1 +POSTHOOK: Output: tv2@view2 +PREHOOK: query: DESCRIBE EXTENDED tv2.view2 PREHOOK: type: DESCTABLE -PREHOOK: Input: default@view2 -POSTHOOK: query: DESCRIBE EXTENDED view2 +PREHOOK: Input: tv2@view2 +POSTHOOK: query: DESCRIBE EXTENDED tv2.view2 POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@view2 +POSTHOOK: Input: tv2@view2 foo int bar string ds string #### A masked pattern was here #### -PREHOOK: query: SELECT * FROM view2 +PREHOOK: query: SELECT * FROM 
tv2.view2 PREHOOK: type: QUERY PREHOOK: Input: default@invites -PREHOOK: Input: default@view2 +PREHOOK: Input: tv2@view2 #### A masked pattern was here #### -POSTHOOK: query: SELECT * FROM view2 +POSTHOOK: query: SELECT * FROM tv2.view2 POSTHOOK: type: QUERY POSTHOOK: Input: default@invites -POSTHOOK: Input: default@view2 +POSTHOOK: Input: tv2@view2 #### A masked pattern was here #### PREHOOK: query: DROP TABLE invites PREHOOK: type: DROPTABLE @@ -62,11 +74,27 @@ POSTHOOK: query: DROP TABLE invites POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@invites POSTHOOK: Output: default@invites -PREHOOK: query: DROP VIEW view2 +PREHOOK: query: DROP VIEW tv2.view2 PREHOOK: type: DROPVIEW -PREHOOK: Input: default@view2 -PREHOOK: Output: default@view2 -POSTHOOK: query: DROP VIEW view2 +PREHOOK: Input: tv2@view2 +PREHOOK: Output: tv2@view2 +POSTHOOK: query: DROP VIEW tv2.view2 POSTHOOK: type: DROPVIEW -POSTHOOK: Input: default@view2 -POSTHOOK: Output: default@view2 +POSTHOOK: Input: tv2@view2 +POSTHOOK: Output: tv2@view2 +PREHOOK: query: DROP DATABASE tv1 +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:tv1 +PREHOOK: Output: database:tv1 +POSTHOOK: query: DROP DATABASE tv1 +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:tv1 +POSTHOOK: Output: database:tv1 +PREHOOK: query: DROP DATABASE tv2 +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:tv2 +PREHOOK: Output: database:tv2 +POSTHOOK: query: DROP DATABASE tv2 +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:tv2 +POSTHOOK: Output: database:tv2 diff --git ql/src/test/results/clientpositive/archive_multi.q.out ql/src/test/results/clientpositive/archive_multi.q.out index 7e84def..d0346d3 100644 --- ql/src/test/results/clientpositive/archive_multi.q.out +++ ql/src/test/results/clientpositive/archive_multi.q.out @@ -1,161 +1,161 @@ -PREHOOK: query: drop table tstsrc -PREHOOK: type: DROPTABLE -POSTHOOK: query: drop table tstsrc -POSTHOOK: type: DROPTABLE -PREHOOK: query: drop table tstsrcpart -PREHOOK: type: DROPTABLE -POSTHOOK: query: drop table tstsrcpart -POSTHOOK: type: DROPTABLE -PREHOOK: query: create table tstsrc like src +PREHOOK: query: create database ac_test +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:ac_test +POSTHOOK: query: create database ac_test +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:ac_test +PREHOOK: query: create table ac_test.tstsrc like default.src PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@tstsrc -POSTHOOK: query: create table tstsrc like src +PREHOOK: Output: ac_test@ac_test.tstsrc +PREHOOK: Output: database:ac_test +POSTHOOK: query: create table ac_test.tstsrc like default.src POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@tstsrc -PREHOOK: query: insert overwrite table tstsrc select key, value from src +POSTHOOK: Output: ac_test@ac_test.tstsrc +POSTHOOK: Output: ac_test@tstsrc +POSTHOOK: Output: database:ac_test +PREHOOK: query: insert overwrite table ac_test.tstsrc select key, value from default.src PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@tstsrc -POSTHOOK: query: insert overwrite table tstsrc select key, value from src +PREHOOK: Output: ac_test@tstsrc +POSTHOOK: query: insert overwrite table ac_test.tstsrc select key, value from default.src POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@tstsrc +POSTHOOK: Output: ac_test@tstsrc POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: create table tstsrcpart like srcpart +PREHOOK: query: create table ac_test.tstsrcpart like default.srcpart PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@tstsrcpart -POSTHOOK: query: create table tstsrcpart like srcpart +PREHOOK: Output: ac_test@ac_test.tstsrcpart +PREHOOK: Output: database:ac_test +POSTHOOK: query: create table ac_test.tstsrcpart like default.srcpart POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@tstsrcpart -PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') -select key, value from srcpart where ds='2008-04-08' and hr='11' +POSTHOOK: Output: ac_test@ac_test.tstsrcpart +POSTHOOK: Output: ac_test@tstsrcpart +POSTHOOK: Output: database:ac_test +PREHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') -select key, value from srcpart where ds='2008-04-08' and hr='11' +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12' +PREHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12' +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] 
-PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='11') -select key, value from srcpart where ds='2008-04-09' and hr='11' +PREHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-09', hr='11') +select key, value from default.srcpart where ds='2008-04-09' and hr='11' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-09/hr=11 -POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='11') -select key, value from srcpart where ds='2008-04-09' and hr='11' +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-09/hr=11 +POSTHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-09', hr='11') +select key, value from default.srcpart where ds='2008-04-09' and hr='11' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-09/hr=11 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-09/hr=11 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='12') -select key, value from srcpart where ds='2008-04-09' and hr='12' +PREHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-09', hr='12') +select key, value from default.srcpart where ds='2008-04-09' and hr='12' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-09/hr=12 -POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='12') -select key, value from srcpart where ds='2008-04-09' and hr='12' +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-09/hr=12 +POSTHOOK: query: insert overwrite table ac_test.tstsrcpart partition (ds='2008-04-09', hr='12') +select key, value from default.srcpart where ds='2008-04-09' and hr='12' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-09/hr=12 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-09/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19) SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 PREHOOK: type: QUERY -PREHOOK: Input: default@tstsrcpart -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19) SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS 
col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 POSTHOOK: type: QUERY -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### 48479881068 -PREHOOK: query: ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08') +PREHOOK: query: ALTER TABLE ac_test.tstsrcpart ARCHIVE PARTITION (ds='2008-04-08') PREHOOK: type: ALTERTABLE_ARCHIVE -PREHOOK: Input: default@tstsrcpart -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: query: ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08') +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: query: ALTER TABLE ac_test.tstsrcpart ARCHIVE PARTITION (ds='2008-04-08') POSTHOOK: type: ALTERTABLE_ARCHIVE -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 PREHOOK: type: QUERY -PREHOOK: Input: default@tstsrcpart -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 POSTHOOK: type: QUERY -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### 48479881068 -PREHOOK: query: SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key +PREHOOK: query: SELECT key, count(1) FROM ac_test.tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key PREHOOK: type: QUERY -PREHOOK: Input: default@tstsrcpart -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### -POSTHOOK: query: SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key +POSTHOOK: query: SELECT key, count(1) FROM ac_test.tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key POSTHOOK: type: QUERY -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: 
Input: ac_test@tstsrcpart +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### 0 3 -PREHOOK: query: SELECT * FROM tstsrcpart a JOIN tstsrc b ON a.key=b.key +PREHOOK: query: SELECT * FROM ac_test.tstsrcpart a JOIN ac_test.tstsrc b ON a.key=b.key WHERE a.ds='2008-04-08' AND a.hr='12' AND a.key='0' PREHOOK: type: QUERY -PREHOOK: Input: default@tstsrc -PREHOOK: Input: default@tstsrcpart -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: ac_test@tstsrc +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### -POSTHOOK: query: SELECT * FROM tstsrcpart a JOIN tstsrc b ON a.key=b.key +POSTHOOK: query: SELECT * FROM ac_test.tstsrcpart a JOIN ac_test.tstsrc b ON a.key=b.key WHERE a.ds='2008-04-08' AND a.hr='12' AND a.key='0' POSTHOOK: type: QUERY -POSTHOOK: Input: default@tstsrc -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrc +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### 0 val_0 2008-04-08 12 0 val_0 0 val_0 2008-04-08 12 0 val_0 @@ -166,28 +166,28 @@ POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 0 val_0 2008-04-08 12 0 val_0 0 val_0 2008-04-08 12 0 val_0 0 val_0 2008-04-08 12 0 val_0 -PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08') +PREHOOK: query: ALTER TABLE ac_test.tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08') PREHOOK: type: ALTERTABLE_UNARCHIVE -PREHOOK: Input: default@tstsrcpart -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08') +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: query: ALTER TABLE ac_test.tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08') POSTHOOK: type: ALTERTABLE_UNARCHIVE -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: ac_test@tstsrcpart@ds=2008-04-08/hr=12 PREHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 PREHOOK: type: QUERY -PREHOOK: Input: default@tstsrcpart -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: Input: ac_test@tstsrcpart +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 #### A masked pattern was here #### POSTHOOK: query: SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col -FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2 +FROM (SELECT * FROM ac_test.tstsrcpart WHERE ds='2008-04-08') subq1) subq2 POSTHOOK: type: QUERY -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Input: ac_test@tstsrcpart +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: ac_test@tstsrcpart@ds=2008-04-08/hr=12 
#### A masked pattern was here #### 48479881068 diff --git ql/src/test/results/clientpositive/create_or_replace_view.q.out ql/src/test/results/clientpositive/create_or_replace_view.q.out index 52ff417..7dd554e 100644 --- ql/src/test/results/clientpositive/create_or_replace_view.q.out +++ ql/src/test/results/clientpositive/create_or_replace_view.q.out @@ -1,20 +1,22 @@ -PREHOOK: query: drop view v -PREHOOK: type: DROPVIEW -POSTHOOK: query: drop view v -POSTHOOK: type: DROPVIEW -PREHOOK: query: create view v as select * from srcpart +PREHOOK: query: create database vt +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:vt +POSTHOOK: query: create database vt +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:vt +PREHOOK: query: create view vt.v as select * from srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart -POSTHOOK: query: create view v as select * from srcpart +POSTHOOK: query: create view vt.v as select * from srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@v -PREHOOK: query: describe formatted v +POSTHOOK: Output: vt@v +PREHOOK: query: describe formatted vt.v PREHOOK: type: DESCTABLE -PREHOOK: Input: default@v -POSTHOOK: query: describe formatted v +PREHOOK: Input: vt@v +POSTHOOK: query: describe formatted vt.v POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v # col_name data_type comment key string @@ -23,7 +25,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: vt #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -44,61 +46,61 @@ Sort Columns: [] View Original Text: select * from srcpart View Expanded Text: select `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` from `default`.`srcpart` PREHOOK: query: -- modifying definition of unpartitioned view -create or replace view v partitioned on (ds, hr) as select * from srcpart +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart POSTHOOK: query: -- modifying definition of unpartitioned view -create or replace view v partitioned on (ds, hr) as select * from srcpart +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@v -PREHOOK: query: alter view v add partition (ds='2008-04-08',hr='11') +POSTHOOK: Output: vt@v +PREHOOK: query: alter view vt.v add partition (ds='2008-04-08',hr='11') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@v -PREHOOK: Output: default@v -POSTHOOK: query: alter view v add partition (ds='2008-04-08',hr='11') +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v +POSTHOOK: query: alter view vt.v add partition (ds='2008-04-08',hr='11') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@v -POSTHOOK: Output: default@v -POSTHOOK: Output: default@v@ds=2008-04-08/hr=11 -PREHOOK: query: alter view v add partition (ds='2008-04-08',hr='12') +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v +POSTHOOK: Output: vt@v@ds=2008-04-08/hr=11 +PREHOOK: query: alter view vt.v add partition (ds='2008-04-08',hr='12') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Input: default@v -PREHOOK: Output: default@v -POSTHOOK: 
query: alter view v add partition (ds='2008-04-08',hr='12') +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v +POSTHOOK: query: alter view vt.v add partition (ds='2008-04-08',hr='12') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Input: default@v -POSTHOOK: Output: default@v -POSTHOOK: Output: default@v@ds=2008-04-08/hr=12 -PREHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v +POSTHOOK: Output: vt@v@ds=2008-04-08/hr=12 +PREHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@v +PREHOOK: Input: vt@v #### A masked pattern was here #### -POSTHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v #### A masked pattern was here #### 409 val_409 2008-04-08 11 409 val_409 2008-04-08 11 409 val_409 2008-04-08 11 -PREHOOK: query: describe formatted v +PREHOOK: query: describe formatted vt.v PREHOOK: type: DESCTABLE -PREHOOK: Input: default@v -POSTHOOK: query: describe formatted v +PREHOOK: Input: vt@v +POSTHOOK: query: describe formatted vt.v POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v # col_name data_type comment key string @@ -111,7 +113,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: vt #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -131,44 +133,66 @@ Sort Columns: [] # View Information View Original Text: select * from srcpart View Expanded Text: select `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` from `default`.`srcpart` -PREHOOK: query: show partitions v +PREHOOK: query: show partitions vt.v PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@v -POSTHOOK: query: show partitions v +PREHOOK: Input: vt@v +POSTHOOK: query: show partitions vt.v POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v ds=2008-04-08/hr=11 ds=2008-04-08/hr=12 +PREHOOK: query: alter view vt.v drop partition (ds='2008-04-08',hr='11') +PREHOOK: type: ALTERTABLE_DROPPARTS +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v@ds=2008-04-08/hr=11 +POSTHOOK: query: alter view vt.v drop partition (ds='2008-04-08',hr='11') +POSTHOOK: type: ALTERTABLE_DROPPARTS +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v@ds=2008-04-08/hr=11 +PREHOOK: query: alter view vt.v drop partition (ds='2008-04-08',hr='12') +PREHOOK: type: ALTERTABLE_DROPPARTS +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v@ds=2008-04-08/hr=12 +POSTHOOK: query: alter view vt.v drop partition (ds='2008-04-08',hr='12') +POSTHOOK: type: ALTERTABLE_DROPPARTS +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v@ds=2008-04-08/hr=12 +PREHOOK: query: show partitions vt.v +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: vt@v +POSTHOOK: query: show partitions vt.v +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: vt@v PREHOOK: query: -- altering partitioned view 1 -create or replace view v partitioned on (ds, hr) as select value, ds, hr from srcpart +create or replace view vt.v partitioned on (ds, hr) as select value, ds, hr from srcpart PREHOOK: type: CREATEVIEW 
PREHOOK: Input: default@srcpart POSTHOOK: query: -- altering partitioned view 1 -create or replace view v partitioned on (ds, hr) as select value, ds, hr from srcpart +create or replace view vt.v partitioned on (ds, hr) as select value, ds, hr from srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@v -PREHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: Output: vt@v +PREHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@v +PREHOOK: Input: vt@v #### A masked pattern was here #### -POSTHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v #### A masked pattern was here #### val_409 2008-04-08 11 val_409 2008-04-08 11 val_409 2008-04-08 11 -PREHOOK: query: describe formatted v +PREHOOK: query: describe formatted vt.v PREHOOK: type: DESCTABLE -PREHOOK: Input: default@v -POSTHOOK: query: describe formatted v +PREHOOK: Input: vt@v +POSTHOOK: query: describe formatted vt.v POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v # col_name data_type comment value string @@ -180,7 +204,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: vt #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -200,44 +224,42 @@ Sort Columns: [] # View Information View Original Text: select value, ds, hr from srcpart View Expanded Text: select `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` from `default`.`srcpart` -PREHOOK: query: show partitions v +PREHOOK: query: show partitions vt.v PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@v -POSTHOOK: query: show partitions v +PREHOOK: Input: vt@v +POSTHOOK: query: show partitions vt.v POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@v -ds=2008-04-08/hr=11 -ds=2008-04-08/hr=12 +POSTHOOK: Input: vt@v PREHOOK: query: -- altering partitioned view 2 -create or replace view v partitioned on (ds, hr) as select key, value, ds, hr from srcpart +create or replace view vt.v partitioned on (ds, hr) as select key, value, ds, hr from srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart POSTHOOK: query: -- altering partitioned view 2 -create or replace view v partitioned on (ds, hr) as select key, value, ds, hr from srcpart +create or replace view vt.v partitioned on (ds, hr) as select key, value, ds, hr from srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@v -PREHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: Output: vt@v +PREHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Input: default@v +PREHOOK: Input: vt@v #### A masked pattern was here #### -POSTHOOK: query: select * from v where value='val_409' and ds='2008-04-08' and hr='11' +POSTHOOK: query: select * from vt.v where value='val_409' and ds='2008-04-08' and hr='11' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: 
default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v #### A masked pattern was here #### 409 val_409 2008-04-08 11 409 val_409 2008-04-08 11 409 val_409 2008-04-08 11 -PREHOOK: query: describe formatted v +PREHOOK: query: describe formatted vt.v PREHOOK: type: DESCTABLE -PREHOOK: Input: default@v -POSTHOOK: query: describe formatted v +PREHOOK: Input: vt@v +POSTHOOK: query: describe formatted vt.v POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v # col_name data_type comment key string @@ -250,7 +272,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: vt #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -270,22 +292,20 @@ Sort Columns: [] # View Information View Original Text: select key, value, ds, hr from srcpart View Expanded Text: select `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` from `default`.`srcpart` -PREHOOK: query: show partitions v +PREHOOK: query: show partitions vt.v PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@v -POSTHOOK: query: show partitions v +PREHOOK: Input: vt@v +POSTHOOK: query: show partitions vt.v POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@v -ds=2008-04-08/hr=11 -ds=2008-04-08/hr=12 -PREHOOK: query: drop view v +POSTHOOK: Input: vt@v +PREHOOK: query: drop view vt.v PREHOOK: type: DROPVIEW -PREHOOK: Input: default@v -PREHOOK: Output: default@v -POSTHOOK: query: drop view v +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v +POSTHOOK: query: drop view vt.v POSTHOOK: type: DROPVIEW -POSTHOOK: Input: default@v -POSTHOOK: Output: default@v +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v PREHOOK: query: -- updating to fix view with invalid definition create table srcpart_temp like srcpart PREHOOK: type: CREATETABLE @@ -296,13 +316,13 @@ create table srcpart_temp like srcpart POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@srcpart_temp -PREHOOK: query: create view v partitioned on (ds, hr) as select * from srcpart_temp +PREHOOK: query: create view vt.v partitioned on (ds, hr) as select * from srcpart_temp PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart_temp -POSTHOOK: query: create view v partitioned on (ds, hr) as select * from srcpart_temp +POSTHOOK: query: create view vt.v partitioned on (ds, hr) as select * from srcpart_temp POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart_temp -POSTHOOK: Output: default@v +POSTHOOK: Output: vt@v PREHOOK: query: drop table srcpart_temp PREHOOK: type: DROPTABLE PREHOOK: Input: default@srcpart_temp @@ -311,21 +331,21 @@ POSTHOOK: query: drop table srcpart_temp POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@srcpart_temp POSTHOOK: Output: default@srcpart_temp -PREHOOK: query: -- v is now invalid -create or replace view v partitioned on (ds, hr) as select * from srcpart +PREHOOK: query: -- vt.v is now invalid +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart PREHOOK: type: CREATEVIEW PREHOOK: Input: default@srcpart -POSTHOOK: query: -- v is now invalid -create or replace view v partitioned on (ds, hr) as select * from srcpart +POSTHOOK: query: -- vt.v is now invalid +create or replace view vt.v partitioned on (ds, hr) as select * from srcpart POSTHOOK: type: CREATEVIEW POSTHOOK: Input: default@srcpart -POSTHOOK: Output: default@v -PREHOOK: query: describe formatted v +POSTHOOK: Output: vt@v +PREHOOK: query: describe formatted vt.v PREHOOK: type: DESCTABLE -PREHOOK: Input: default@v -POSTHOOK: 
query: describe formatted v +PREHOOK: Input: vt@v +POSTHOOK: query: describe formatted vt.v POSTHOOK: type: DESCTABLE -POSTHOOK: Input: default@v +POSTHOOK: Input: vt@v # col_name data_type comment key string @@ -338,7 +358,7 @@ ds string hr string # Detailed Table Information -Database: default +Database: vt #### A masked pattern was here #### Protect Mode: None Retention: 0 @@ -358,11 +378,19 @@ Sort Columns: [] # View Information View Original Text: select * from srcpart View Expanded Text: select `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` from `default`.`srcpart` -PREHOOK: query: drop view v +PREHOOK: query: drop view vt.v PREHOOK: type: DROPVIEW -PREHOOK: Input: default@v -PREHOOK: Output: default@v -POSTHOOK: query: drop view v +PREHOOK: Input: vt@v +PREHOOK: Output: vt@v +POSTHOOK: query: drop view vt.v POSTHOOK: type: DROPVIEW -POSTHOOK: Input: default@v -POSTHOOK: Output: default@v +POSTHOOK: Input: vt@v +POSTHOOK: Output: vt@v +PREHOOK: query: drop database vt +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:vt +PREHOOK: Output: database:vt +POSTHOOK: query: drop database vt +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:vt +POSTHOOK: Output: database:vt diff --git ql/src/test/results/clientpositive/drop_multi_partitions.q.out ql/src/test/results/clientpositive/drop_multi_partitions.q.out index 58a472c..f2a1482 100644 --- ql/src/test/results/clientpositive/drop_multi_partitions.q.out +++ ql/src/test/results/clientpositive/drop_multi_partitions.q.out @@ -1,55 +1,64 @@ -PREHOOK: query: create table mp (a string) partitioned by (b string, c string) +PREHOOK: query: create database dmp +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:dmp +POSTHOOK: query: create database dmp +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:dmp +PREHOOK: query: create table dmp.mp (a string) partitioned by (b string, c string) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@mp -POSTHOOK: query: create table mp (a string) partitioned by (b string, c string) +PREHOOK: Output: database:dmp +PREHOOK: Output: dmp@dmp.mp +POSTHOOK: query: create table dmp.mp (a string) partitioned by (b string, c string) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@mp -PREHOOK: query: alter table mp add partition (b='1', c='1') +POSTHOOK: Output: database:dmp +POSTHOOK: Output: dmp@dmp.mp +POSTHOOK: Output: dmp@mp +PREHOOK: query: alter table dmp.mp add partition (b='1', c='1') PREHOOK: type: ALTERTABLE_ADDPARTS -PREHOOK: Output: default@mp -POSTHOOK: query: alter table mp add partition (b='1', c='1') +PREHOOK: Output: dmp@mp +POSTHOOK: query: alter table dmp.mp add partition (b='1', c='1') POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@mp -POSTHOOK: Output: default@mp@b=1/c=1 -PREHOOK: query: alter table mp add partition (b='1', c='2') +POSTHOOK: Output: dmp@mp +POSTHOOK: Output: dmp@mp@b=1/c=1 +PREHOOK: query: alter table dmp.mp add partition (b='1', c='2') PREHOOK: type: ALTERTABLE_ADDPARTS -PREHOOK: Output: default@mp -POSTHOOK: query: alter table mp add partition (b='1', c='2') +PREHOOK: Output: dmp@mp +POSTHOOK: query: alter table dmp.mp add partition (b='1', c='2') POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@mp -POSTHOOK: Output: default@mp@b=1/c=2 -PREHOOK: query: alter table mp add partition (b='2', c='2') +POSTHOOK: Output: dmp@mp +POSTHOOK: Output: dmp@mp@b=1/c=2 +PREHOOK: query: alter table dmp.mp add partition (b='2', c='2') PREHOOK: 
type: ALTERTABLE_ADDPARTS -PREHOOK: Output: default@mp -POSTHOOK: query: alter table mp add partition (b='2', c='2') +PREHOOK: Output: dmp@mp +POSTHOOK: query: alter table dmp.mp add partition (b='2', c='2') POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@mp -POSTHOOK: Output: default@mp@b=2/c=2 -PREHOOK: query: show partitions mp +POSTHOOK: Output: dmp@mp +POSTHOOK: Output: dmp@mp@b=2/c=2 +PREHOOK: query: show partitions dmp.mp PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@mp -POSTHOOK: query: show partitions mp +PREHOOK: Input: dmp@mp +POSTHOOK: query: show partitions dmp.mp POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@mp +POSTHOOK: Input: dmp@mp b=1/c=1 b=1/c=2 b=2/c=2 -PREHOOK: query: explain extended alter table mp drop partition (b='1') +PREHOOK: query: explain extended alter table dmp.mp drop partition (b='1') PREHOOK: type: ALTERTABLE_DROPPARTS -POSTHOOK: query: explain extended alter table mp drop partition (b='1') +POSTHOOK: query: explain extended alter table dmp.mp drop partition (b='1') POSTHOOK: type: ALTERTABLE_DROPPARTS ABSTRACT SYNTAX TREE: -TOK_ALTERTABLE_DROPPARTS +TOK_ALTERTABLE TOK_TABNAME + dmp mp - TOK_PARTSPEC - TOK_PARTVAL - b - = - '1' + TOK_ALTERTABLE_DROPPARTS + TOK_PARTSPEC + TOK_PARTVAL + b + = + '1' STAGE DEPENDENCIES: @@ -59,35 +68,51 @@ STAGE PLANS: Stage: Stage-0 Drop Table Operator: Drop Table - table: default.mp + table: dmp.mp -PREHOOK: query: alter table mp drop partition (b='1') +PREHOOK: query: alter table dmp.mp drop partition (b='1') PREHOOK: type: ALTERTABLE_DROPPARTS -PREHOOK: Input: default@mp -PREHOOK: Output: default@mp@b=1/c=1 -PREHOOK: Output: default@mp@b=1/c=2 -POSTHOOK: query: alter table mp drop partition (b='1') +PREHOOK: Input: dmp@mp +PREHOOK: Output: dmp@mp@b=1/c=1 +PREHOOK: Output: dmp@mp@b=1/c=2 +POSTHOOK: query: alter table dmp.mp drop partition (b='1') POSTHOOK: type: ALTERTABLE_DROPPARTS -POSTHOOK: Input: default@mp -POSTHOOK: Output: default@mp@b=1/c=1 -POSTHOOK: Output: default@mp@b=1/c=2 -PREHOOK: query: show partitions mp +POSTHOOK: Input: dmp@mp +POSTHOOK: Output: dmp@mp@b=1/c=1 +POSTHOOK: Output: dmp@mp@b=1/c=2 +PREHOOK: query: show partitions dmp.mp PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@mp -POSTHOOK: query: show partitions mp +PREHOOK: Input: dmp@mp +POSTHOOK: query: show partitions dmp.mp POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@mp +POSTHOOK: Input: dmp@mp b=2/c=2 -PREHOOK: query: alter table mp drop if exists partition (b='3') +PREHOOK: query: alter table dmp.mp drop if exists partition (b='3') PREHOOK: type: ALTERTABLE_DROPPARTS -PREHOOK: Input: default@mp -POSTHOOK: query: alter table mp drop if exists partition (b='3') +PREHOOK: Input: dmp@mp +POSTHOOK: query: alter table dmp.mp drop if exists partition (b='3') POSTHOOK: type: ALTERTABLE_DROPPARTS -POSTHOOK: Input: default@mp -PREHOOK: query: show partitions mp +POSTHOOK: Input: dmp@mp +PREHOOK: query: show partitions dmp.mp PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@mp -POSTHOOK: query: show partitions mp +PREHOOK: Input: dmp@mp +POSTHOOK: query: show partitions dmp.mp POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@mp +POSTHOOK: Input: dmp@mp b=2/c=2 +PREHOOK: query: drop table dmp.mp +PREHOOK: type: DROPTABLE +PREHOOK: Input: dmp@mp +PREHOOK: Output: dmp@mp +POSTHOOK: query: drop table dmp.mp +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: dmp@mp +POSTHOOK: Output: dmp@mp +PREHOOK: query: drop database dmp +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:dmp +PREHOOK: 
Output: database:dmp +POSTHOOK: query: drop database dmp +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:dmp +POSTHOOK: Output: database:dmp diff --git ql/src/test/results/clientpositive/exchange_partition.q.out ql/src/test/results/clientpositive/exchange_partition.q.out index 381a9fd..79458bc 100644 --- ql/src/test/results/clientpositive/exchange_partition.q.out +++ ql/src/test/results/clientpositive/exchange_partition.q.out @@ -1,65 +1,79 @@ -PREHOOK: query: CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING) +PREHOOK: query: create database ex1 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:ex1 +POSTHOOK: query: create database ex1 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:ex1 +PREHOOK: query: create database ex2 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:ex2 +POSTHOOK: query: create database ex2 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:ex2 +PREHOOK: query: CREATE TABLE ex1.exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@exchange_part_test1 -POSTHOOK: query: CREATE TABLE exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING) +PREHOOK: Output: database:ex1 +PREHOOK: Output: ex1@ex1.exchange_part_test1 +POSTHOOK: query: CREATE TABLE ex1.exchange_part_test1 (f1 string) PARTITIONED BY (ds STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@exchange_part_test1 -PREHOOK: query: CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING) +POSTHOOK: Output: database:ex1 +POSTHOOK: Output: ex1@ex1.exchange_part_test1 +POSTHOOK: Output: ex1@exchange_part_test1 +PREHOOK: query: CREATE TABLE ex2.exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@exchange_part_test2 -POSTHOOK: query: CREATE TABLE exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING) +PREHOOK: Output: database:ex2 +PREHOOK: Output: ex2@ex2.exchange_part_test2 +POSTHOOK: query: CREATE TABLE ex2.exchange_part_test2 (f1 string) PARTITIONED BY (ds STRING) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@exchange_part_test2 -PREHOOK: query: SHOW PARTITIONS exchange_part_test1 +POSTHOOK: Output: database:ex2 +POSTHOOK: Output: ex2@ex2.exchange_part_test2 +POSTHOOK: Output: ex2@exchange_part_test2 +PREHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test1 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test1 +PREHOOK: Input: ex1@exchange_part_test1 +POSTHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test1 -PREHOOK: query: SHOW PARTITIONS exchange_part_test2 +POSTHOOK: Input: ex1@exchange_part_test1 +PREHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test2 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test2 +PREHOOK: Input: ex2@exchange_part_test2 +POSTHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test2 -PREHOOK: query: ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05') +POSTHOOK: Input: ex2@exchange_part_test2 +PREHOOK: query: ALTER TABLE ex2.exchange_part_test2 ADD PARTITION (ds='2013-04-05') PREHOOK: type: ALTERTABLE_ADDPARTS -PREHOOK: 
Output: default@exchange_part_test2 -POSTHOOK: query: ALTER TABLE exchange_part_test2 ADD PARTITION (ds='2013-04-05') +PREHOOK: Output: ex2@exchange_part_test2 +POSTHOOK: query: ALTER TABLE ex2.exchange_part_test2 ADD PARTITION (ds='2013-04-05') POSTHOOK: type: ALTERTABLE_ADDPARTS -POSTHOOK: Output: default@exchange_part_test2 -POSTHOOK: Output: default@exchange_part_test2@ds=2013-04-05 -PREHOOK: query: SHOW PARTITIONS exchange_part_test1 +POSTHOOK: Output: ex2@exchange_part_test2 +POSTHOOK: Output: ex2@exchange_part_test2@ds=2013-04-05 +PREHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test1 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test1 +PREHOOK: Input: ex1@exchange_part_test1 +POSTHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test1 -PREHOOK: query: SHOW PARTITIONS exchange_part_test2 +POSTHOOK: Input: ex1@exchange_part_test1 +PREHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test2 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test2 +PREHOOK: Input: ex2@exchange_part_test2 +POSTHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test2 +POSTHOOK: Input: ex2@exchange_part_test2 ds=2013-04-05 -PREHOOK: query: ALTER TABLE exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE exchange_part_test2 +PREHOOK: query: ALTER TABLE ex1.exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE ex2.exchange_part_test2 PREHOOK: type: null -POSTHOOK: query: ALTER TABLE exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE exchange_part_test2 +POSTHOOK: query: ALTER TABLE ex1.exchange_part_test1 EXCHANGE PARTITION (ds='2013-04-05') WITH TABLE ex2.exchange_part_test2 POSTHOOK: type: null -PREHOOK: query: SHOW PARTITIONS exchange_part_test1 +PREHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test1 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test1 +PREHOOK: Input: ex1@exchange_part_test1 +POSTHOOK: query: SHOW PARTITIONS ex1.exchange_part_test1 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test1 +POSTHOOK: Input: ex1@exchange_part_test1 ds=2013-04-05 -PREHOOK: query: SHOW PARTITIONS exchange_part_test2 +PREHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 PREHOOK: type: SHOWPARTITIONS -PREHOOK: Input: default@exchange_part_test2 -POSTHOOK: query: SHOW PARTITIONS exchange_part_test2 +PREHOOK: Input: ex2@exchange_part_test2 +POSTHOOK: query: SHOW PARTITIONS ex2.exchange_part_test2 POSTHOOK: type: SHOWPARTITIONS -POSTHOOK: Input: default@exchange_part_test2 +POSTHOOK: Input: ex2@exchange_part_test2 diff --git ql/src/test/results/clientpositive/index_auto_empty.q.out ql/src/test/results/clientpositive/index_auto_empty.q.out index 6a1a6c5..3512fe2 100644 --- ql/src/test/results/clientpositive/index_auto_empty.q.out +++ ql/src/test/results/clientpositive/index_auto_empty.q.out @@ -1,50 +1,57 @@ PREHOOK: query: -- Test to ensure that an empty index result is propagated correctly --- Create temp, and populate it with some values in src. 
-CREATE TABLE temp(key STRING, val STRING) STORED AS TEXTFILE -PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@temp +CREATE DATABASE it +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:it POSTHOOK: query: -- Test to ensure that an empty index result is propagated correctly --- Create temp, and populate it with some values in src. -CREATE TABLE temp(key STRING, val STRING) STORED AS TEXTFILE +CREATE DATABASE it +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:it +PREHOOK: query: -- Create temp, and populate it with some values in src. +CREATE TABLE it.temp(key STRING, val STRING) STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:it +PREHOOK: Output: it@it.temp +POSTHOOK: query: -- Create temp, and populate it with some values in src. +CREATE TABLE it.temp(key STRING, val STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@temp -PREHOOK: query: -- Build an index on temp. -CREATE INDEX temp_index ON TABLE temp(key) as 'COMPACT' WITH DEFERRED REBUILD +POSTHOOK: Output: database:it +POSTHOOK: Output: it@it.temp +POSTHOOK: Output: it@temp +PREHOOK: query: -- Build an index on it.temp. +CREATE INDEX temp_index ON TABLE it.temp(key) as 'COMPACT' WITH DEFERRED REBUILD PREHOOK: type: CREATEINDEX -PREHOOK: Input: default@temp -POSTHOOK: query: -- Build an index on temp. -CREATE INDEX temp_index ON TABLE temp(key) as 'COMPACT' WITH DEFERRED REBUILD +PREHOOK: Input: it@temp +POSTHOOK: query: -- Build an index on it.temp. +CREATE INDEX temp_index ON TABLE it.temp(key) as 'COMPACT' WITH DEFERRED REBUILD POSTHOOK: type: CREATEINDEX -POSTHOOK: Input: default@temp -POSTHOOK: Output: default@default__temp_temp_index__ -PREHOOK: query: ALTER INDEX temp_index ON temp REBUILD +POSTHOOK: Input: it@temp +POSTHOOK: Output: it@it__temp_temp_index__ +PREHOOK: query: ALTER INDEX temp_index ON it.temp REBUILD PREHOOK: type: ALTERINDEX_REBUILD -PREHOOK: Input: default@temp -PREHOOK: Output: default@default__temp_temp_index__ -POSTHOOK: query: ALTER INDEX temp_index ON temp REBUILD +PREHOOK: Input: it@temp +PREHOOK: Output: it@it__temp_temp_index__ +POSTHOOK: query: ALTER INDEX temp_index ON it.temp REBUILD POSTHOOK: type: ALTERINDEX_REBUILD -POSTHOOK: Input: default@temp -POSTHOOK: Output: default@default__temp_temp_index__ -POSTHOOK: Lineage: default__temp_temp_index__._bucketname SIMPLE [(temp)temp.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ] -POSTHOOK: Lineage: default__temp_temp_index__._offsets EXPRESSION [(temp)temp.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ] -POSTHOOK: Lineage: default__temp_temp_index__.key SIMPLE [(temp)temp.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Input: it@temp +POSTHOOK: Output: it@it__temp_temp_index__ +POSTHOOK: Lineage: it__temp_temp_index__._bucketname SIMPLE [(temp)temp.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ] +POSTHOOK: Lineage: it__temp_temp_index__._offsets EXPRESSION [(temp)temp.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ] +POSTHOOK: Lineage: it__temp_temp_index__.key SIMPLE [(temp)temp.FieldSchema(name:key, type:string, comment:null), ] PREHOOK: query: -- query should not return any values -SELECT * FROM default__temp_temp_index__ WHERE key = 86 +SELECT * FROM it.it__temp_temp_index__ WHERE key = 86 PREHOOK: type: QUERY -PREHOOK: Input: default@default__temp_temp_index__ +PREHOOK: Input: it@it__temp_temp_index__ #### A 
masked pattern was here #### POSTHOOK: query: -- query should not return any values -SELECT * FROM default__temp_temp_index__ WHERE key = 86 +SELECT * FROM it.it__temp_temp_index__ WHERE key = 86 POSTHOOK: type: QUERY -POSTHOOK: Input: default@default__temp_temp_index__ +POSTHOOK: Input: it@it__temp_temp_index__ #### A masked pattern was here #### -PREHOOK: query: EXPLAIN SELECT * FROM temp WHERE key = 86 +PREHOOK: query: EXPLAIN SELECT * FROM it.temp WHERE key = 86 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN SELECT * FROM temp WHERE key = 86 +POSTHOOK: query: EXPLAIN SELECT * FROM it.temp WHERE key = 86 POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -79,19 +86,27 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: SELECT * FROM temp WHERE key = 86 +PREHOOK: query: SELECT * FROM it.temp WHERE key = 86 PREHOOK: type: QUERY -PREHOOK: Input: default@temp +PREHOOK: Input: it@temp #### A masked pattern was here #### -POSTHOOK: query: SELECT * FROM temp WHERE key = 86 +POSTHOOK: query: SELECT * FROM it.temp WHERE key = 86 POSTHOOK: type: QUERY -POSTHOOK: Input: default@temp +POSTHOOK: Input: it@temp #### A masked pattern was here #### -PREHOOK: query: DROP table temp +PREHOOK: query: DROP table it.temp PREHOOK: type: DROPTABLE -PREHOOK: Input: default@temp -PREHOOK: Output: default@temp -POSTHOOK: query: DROP table temp +PREHOOK: Input: it@temp +PREHOOK: Output: it@temp +POSTHOOK: query: DROP table it.temp POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@temp -POSTHOOK: Output: default@temp +POSTHOOK: Input: it@temp +POSTHOOK: Output: it@temp +PREHOOK: query: DROP DATABASE it +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:it +PREHOOK: Output: database:it +POSTHOOK: query: DROP DATABASE it +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:it +POSTHOOK: Output: database:it diff --git ql/src/test/results/clientpositive/touch.q.out ql/src/test/results/clientpositive/touch.q.out index 7ea3807..7abec01 100644 --- ql/src/test/results/clientpositive/touch.q.out +++ ql/src/test/results/clientpositive/touch.q.out @@ -1,89 +1,97 @@ -PREHOOK: query: drop table tstsrc -PREHOOK: type: DROPTABLE -POSTHOOK: query: drop table tstsrc -POSTHOOK: type: DROPTABLE -PREHOOK: query: drop table tstsrcpart -PREHOOK: type: DROPTABLE -POSTHOOK: query: drop table tstsrcpart -POSTHOOK: type: DROPTABLE -PREHOOK: query: create table tstsrc like src +PREHOOK: query: create database tc +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:tc +POSTHOOK: query: create database tc +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:tc +PREHOOK: query: create table tc.tstsrc like default.src PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@tstsrc -POSTHOOK: query: create table tstsrc like src +PREHOOK: Output: database:tc +PREHOOK: Output: tc@tc.tstsrc +POSTHOOK: query: create table tc.tstsrc like default.src POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@tstsrc -PREHOOK: query: insert overwrite table tstsrc select key, value from src +POSTHOOK: Output: database:tc +POSTHOOK: Output: tc@tc.tstsrc +POSTHOOK: Output: tc@tstsrc +PREHOOK: query: insert overwrite table tc.tstsrc select key, value from default.src PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: default@tstsrc -POSTHOOK: query: insert overwrite table tstsrc select key, value from src +PREHOOK: Output: tc@tstsrc +POSTHOOK: query: insert overwrite table tc.tstsrc select key, value from default.src POSTHOOK: type: 
QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: default@tstsrc +POSTHOOK: Output: tc@tstsrc POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: create table tstsrcpart like srcpart +PREHOOK: query: create table tc.tstsrcpart like default.srcpart PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@tstsrcpart -POSTHOOK: query: create table tstsrcpart like srcpart +PREHOOK: Output: database:tc +PREHOOK: Output: tc@tc.tstsrcpart +POSTHOOK: query: create table tc.tstsrcpart like default.srcpart POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@tstsrcpart -PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12' +POSTHOOK: Output: database:tc +POSTHOOK: Output: tc@tc.tstsrcpart +POSTHOOK: Output: tc@tstsrcpart +PREHOOK: query: insert overwrite table tc.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12' PREHOOK: type: QUERY PREHOOK: Input: default@srcpart PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12') -select key, value from srcpart where ds='2008-04-08' and hr='12' +PREHOOK: Output: tc@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: query: insert overwrite table tc.tstsrcpart partition (ds='2008-04-08', hr='12') +select key, value from default.srcpart where ds='2008-04-08' and hr='12' POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Output: tc@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -PREHOOK: query: ALTER TABLE tstsrc TOUCH +PREHOOK: query: ALTER TABLE tc.tstsrc TOUCH PREHOOK: type: ALTERTABLE_TOUCH -PREHOOK: Input: default@tstsrc -PREHOOK: Output: default@tstsrc -POSTHOOK: query: ALTER TABLE tstsrc TOUCH +PREHOOK: Input: tc@tstsrc +PREHOOK: Output: tc@tstsrc +POSTHOOK: query: ALTER TABLE tc.tstsrc TOUCH POSTHOOK: type: ALTERTABLE_TOUCH -POSTHOOK: Input: default@tstsrc -POSTHOOK: Output: default@tstsrc -PREHOOK: query: ALTER TABLE tstsrcpart TOUCH +POSTHOOK: Input: tc@tstsrc +POSTHOOK: Output: tc@tstsrc +PREHOOK: query: ALTER TABLE tc.tstsrcpart TOUCH PREHOOK: type: ALTERTABLE_TOUCH -PREHOOK: Input: default@tstsrcpart -PREHOOK: Output: default@tstsrcpart -POSTHOOK: query: ALTER TABLE tstsrcpart TOUCH +PREHOOK: Input: tc@tstsrcpart +PREHOOK: Output: tc@tstsrcpart +POSTHOOK: query: ALTER TABLE tc.tstsrcpart TOUCH POSTHOOK: type: ALTERTABLE_TOUCH -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Output: default@tstsrcpart -PREHOOK: query: ALTER TABLE tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12') +POSTHOOK: Input: tc@tstsrcpart +POSTHOOK: Output: tc@tstsrcpart +PREHOOK: query: ALTER TABLE tc.tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12') PREHOOK: type: ALTERTABLE_TOUCH -PREHOOK: Input: default@tstsrcpart -PREHOOK: Output: 
default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: query: ALTER TABLE tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12') +PREHOOK: Input: tc@tstsrcpart +PREHOOK: Output: tc@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: query: ALTER TABLE tc.tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12') POSTHOOK: type: ALTERTABLE_TOUCH -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Input: default@tstsrcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -PREHOOK: query: drop table tstsrc +POSTHOOK: Input: tc@tstsrcpart +POSTHOOK: Input: tc@tstsrcpart@ds=2008-04-08/hr=12 +POSTHOOK: Output: tc@tstsrcpart@ds=2008-04-08/hr=12 +PREHOOK: query: drop table tc.tstsrc PREHOOK: type: DROPTABLE -PREHOOK: Input: default@tstsrc -PREHOOK: Output: default@tstsrc -POSTHOOK: query: drop table tstsrc +PREHOOK: Input: tc@tstsrc +PREHOOK: Output: tc@tstsrc +POSTHOOK: query: drop table tc.tstsrc POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@tstsrc -POSTHOOK: Output: default@tstsrc -PREHOOK: query: drop table tstsrcpart +POSTHOOK: Input: tc@tstsrc +POSTHOOK: Output: tc@tstsrc +PREHOOK: query: drop table tc.tstsrcpart PREHOOK: type: DROPTABLE -PREHOOK: Input: default@tstsrcpart -PREHOOK: Output: default@tstsrcpart -POSTHOOK: query: drop table tstsrcpart +PREHOOK: Input: tc@tstsrcpart +PREHOOK: Output: tc@tstsrcpart +POSTHOOK: query: drop table tc.tstsrcpart POSTHOOK: type: DROPTABLE -POSTHOOK: Input: default@tstsrcpart -POSTHOOK: Output: default@tstsrcpart +POSTHOOK: Input: tc@tstsrcpart +POSTHOOK: Output: tc@tstsrcpart +PREHOOK: query: drop database tc +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:tc +PREHOOK: Output: database:tc +POSTHOOK: query: drop database tc +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:tc +POSTHOOK: Output: database:tc diff --git ql/src/test/results/clientpositive/unset_table_view_property.q.out ql/src/test/results/clientpositive/unset_table_view_property.q.out index 8cf6686..92dfadb 100644 --- ql/src/test/results/clientpositive/unset_table_view_property.q.out +++ ql/src/test/results/clientpositive/unset_table_view_property.q.out @@ -1,29 +1,36 @@ -PREHOOK: query: CREATE TABLE testTable(col1 INT, col2 INT) +PREHOOK: query: CREATE DATABASE vt +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:vt +POSTHOOK: query: CREATE DATABASE vt +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:vt +PREHOOK: query: CREATE TABLE vt.testTable(col1 INT, col2 INT) PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@testTable -POSTHOOK: query: CREATE TABLE testTable(col1 INT, col2 INT) +PREHOOK: Output: database:vt +PREHOOK: Output: vt@vt.testTable +POSTHOOK: query: CREATE TABLE vt.testTable(col1 INT, col2 INT) POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@testTable -PREHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: Output: database:vt +POSTHOOK: Output: vt@testTable +POSTHOOK: Output: vt@vt.testTable +PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES -POSTHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES #### A masked pattern was here #### PREHOOK: query: -- UNSET TABLE PROPERTIES -ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3') +ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3') PREHOOK: type: ALTERTABLE_PROPERTIES -PREHOOK: Input: default@testtable -PREHOOK: Output: default@testtable +PREHOOK: Input: vt@testtable +PREHOOK: 
Output: vt@testtable POSTHOOK: query: -- UNSET TABLE PROPERTIES -ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3') +ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3') POSTHOOK: type: ALTERTABLE_PROPERTIES -POSTHOOK: Input: default@testtable -POSTHOOK: Output: default@testtable -PREHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: Input: vt@testtable +POSTHOOK: Output: vt@testtable +PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES -POSTHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES COLUMN_STATS_ACCURATE false a 1 @@ -35,18 +42,18 @@ rawDataSize -1 totalSize 0 #### A masked pattern was here #### PREHOOK: query: -- UNSET all the properties -ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'c') +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'c') PREHOOK: type: ALTERTABLE_PROPERTIES -PREHOOK: Input: default@testtable -PREHOOK: Output: default@testtable +PREHOOK: Input: vt@testtable +PREHOOK: Output: vt@testtable POSTHOOK: query: -- UNSET all the properties -ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'c') +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'c') POSTHOOK: type: ALTERTABLE_PROPERTIES -POSTHOOK: Input: default@testtable -POSTHOOK: Output: default@testtable -PREHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: Input: vt@testtable +POSTHOOK: Output: vt@testtable +PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES -POSTHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES COLUMN_STATS_ACCURATE false #### A masked pattern was here #### @@ -55,17 +62,17 @@ numRows -1 rawDataSize -1 totalSize 0 #### A masked pattern was here #### -PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') +PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') PREHOOK: type: ALTERTABLE_PROPERTIES -PREHOOK: Input: default@testtable -PREHOOK: Output: default@testtable -POSTHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') +PREHOOK: Input: vt@testtable +PREHOOK: Output: vt@testtable +POSTHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') POSTHOOK: type: ALTERTABLE_PROPERTIES -POSTHOOK: Input: default@testtable -POSTHOOK: Output: default@testtable -PREHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: Input: vt@testtable +POSTHOOK: Output: vt@testtable +PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES -POSTHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES COLUMN_STATS_ACCURATE false a 1 @@ -78,18 +85,18 @@ rawDataSize -1 totalSize 0 #### A masked pattern was here #### PREHOOK: query: -- UNSET a subset of the properties -ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd') +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'd') PREHOOK: type: ALTERTABLE_PROPERTIES -PREHOOK: Input: default@testtable -PREHOOK: Output: default@testtable +PREHOOK: Input: vt@testtable +PREHOOK: Output: vt@testtable POSTHOOK: query: -- UNSET a subset of the properties -ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd') +ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('a', 'd') POSTHOOK: type: ALTERTABLE_PROPERTIES -POSTHOOK: Input: default@testtable -POSTHOOK: Output: default@testtable -PREHOOK: query: SHOW TBLPROPERTIES testTable +POSTHOOK: Input: 
vt@testtable
+POSTHOOK: Output: vt@testtable
+PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
POSTHOOK: type: SHOW_TBLPROPERTIES
COLUMN_STATS_ACCURATE false
c 3
@@ -100,18 +107,18 @@ rawDataSize -1
totalSize 0
#### A masked pattern was here ####
PREHOOK: query: -- the same property being UNSET multiple times
-ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('c', 'c', 'c')
PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@testtable
-PREHOOK: Output: default@testtable
+PREHOOK: Input: vt@testtable
+PREHOOK: Output: vt@testtable
POSTHOOK: query: -- the same property being UNSET multiple times
-ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES ('c', 'c', 'c')
POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@testtable
-POSTHOOK: Output: default@testtable
-PREHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: Input: vt@testtable
+POSTHOOK: Output: vt@testtable
+PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
POSTHOOK: type: SHOW_TBLPROPERTIES
COLUMN_STATS_ACCURATE false
#### A masked pattern was here ####
@@ -120,17 +127,17 @@ numRows -1
rawDataSize -1
totalSize 0
#### A masked pattern was here ####
-PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
+PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@testtable
-PREHOOK: Output: default@testtable
-POSTHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
+PREHOOK: Input: vt@testtable
+PREHOOK: Output: vt@testtable
+POSTHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@testtable
-POSTHOOK: Output: default@testtable
-PREHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: Input: vt@testtable
+POSTHOOK: Output: vt@testtable
+PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
POSTHOOK: type: SHOW_TBLPROPERTIES
COLUMN_STATS_ACCURATE false
a 1
@@ -144,18 +151,18 @@ rawDataSize -1
totalSize 0
#### A masked pattern was here ####
PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f')
PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@testtable
-PREHOOK: Output: default@testtable
+PREHOOK: Input: vt@testtable
+PREHOOK: Output: vt@testtable
POSTHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f')
POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@testtable
-POSTHOOK: Output: default@testtable
-PREHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: Input: vt@testtable
+POSTHOOK: Output: vt@testtable
+PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
POSTHOOK: type: SHOW_TBLPROPERTIES
COLUMN_STATS_ACCURATE false
a 1
@@ -167,18 +174,18 @@ rawDataSize -1
totalSize 0
#### A masked pattern was here ####
PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z')
PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@testtable
-PREHOOK: Output: default@testtable
+PREHOOK: Input: vt@testtable
+PREHOOK: Output: vt@testtable
POSTHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z')
+ALTER TABLE vt.testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z')
POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@testtable
-POSTHOOK: Output: default@testtable
-PREHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: Input: vt@testtable
+POSTHOOK: Output: vt@testtable
+PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testTable
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
POSTHOOK: type: SHOW_TBLPROPERTIES
COLUMN_STATS_ACCURATE false
a 1
@@ -188,57 +195,65 @@ numRows -1
rawDataSize -1
totalSize 0
#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE vt.testTable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: vt@testtable
+PREHOOK: Output: vt@testtable
+POSTHOOK: query: DROP TABLE vt.testTable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: vt@testtable
+POSTHOOK: Output: vt@testtable
PREHOOK: query: -- UNSET VIEW PROPERTIES
-CREATE VIEW testView AS SELECT value FROM src WHERE key=86
+CREATE VIEW vt.testView AS SELECT value FROM src WHERE key=86
PREHOOK: type: CREATEVIEW
PREHOOK: Input: default@src
POSTHOOK: query: -- UNSET VIEW PROPERTIES
-CREATE VIEW testView AS SELECT value FROM src WHERE key=86
+CREATE VIEW vt.testView AS SELECT value FROM src WHERE key=86
POSTHOOK: type: CREATEVIEW
POSTHOOK: Input: default@src
-POSTHOOK: Output: default@testView
-PREHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB'='200')
+POSTHOOK: Output: vt@testView
+PREHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB'='200')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
-POSTHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB'='200')
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
+POSTHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB'='200')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propA 100
propB 200
#### A masked pattern was here ####
PREHOOK: query: -- UNSET all the properties
-ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propB')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propB')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
POSTHOOK: query: -- UNSET all the properties
-ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propB')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propB')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
-PREHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400')
+PREHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
-POSTHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400')
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
+POSTHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propC'='300', 'propD'='400')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propA 100
@@ -246,48 +261,48 @@ propC 300
propD 400
#### A masked pattern was here ####
PREHOOK: query: -- UNSET a subset of the properties
-ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propC')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propC')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
POSTHOOK: query: -- UNSET a subset of the properties
-ALTER VIEW testView UNSET TBLPROPERTIES ('propA', 'propC')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propA', 'propC')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propD 400
#### A masked pattern was here ####
PREHOOK: query: -- the same property being UNSET multiple times
-ALTER VIEW testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
POSTHOOK: query: -- the same property being UNSET multiple times
-ALTER VIEW testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES ('propD', 'propD', 'propD')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
-PREHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400')
+PREHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
-POSTHOOK: query: ALTER VIEW testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400')
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
+POSTHOOK: query: ALTER VIEW vt.testView SET TBLPROPERTIES ('propA'='100', 'propB' = '200', 'propC'='300', 'propD'='400')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propA 100
@@ -296,37 +311,53 @@ propC 300
propD 400
#### A masked pattern was here ####
PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 'propC', 'propZ')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 'propC', 'propZ')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
POSTHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 'propC', 'propZ')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propC', 'propD', 'propD', 'propC', 'propZ')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propA 100
propB 200
#### A masked pattern was here ####
PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF')
PREHOOK: type: ALTERVIEW_PROPERTIES
-PREHOOK: Input: default@testview
-PREHOOK: Output: default@testview
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
POSTHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
-ALTER VIEW testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF')
+ALTER VIEW vt.testView UNSET TBLPROPERTIES IF EXISTS ('propB', 'propC', 'propD', 'propF')
POSTHOOK: type: ALTERVIEW_PROPERTIES
-POSTHOOK: Input: default@testview
-POSTHOOK: Output: default@testview
-PREHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: SHOW TBLPROPERTIES vt.testView
PREHOOK: type: SHOW_TBLPROPERTIES
-POSTHOOK: query: SHOW TBLPROPERTIES testView
+POSTHOOK: query: SHOW TBLPROPERTIES vt.testView
POSTHOOK: type: SHOW_TBLPROPERTIES
#### A masked pattern was here ####
propA 100
#### A masked pattern was here ####
+PREHOOK: query: DROP VIEW vt.testView
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: vt@testview
+PREHOOK: Output: vt@testview
+POSTHOOK: query: DROP VIEW vt.testView
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: vt@testview
+POSTHOOK: Output: vt@testview
+PREHOOK: query: DROP DATABASE vt
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:vt
+PREHOOK: Output: database:vt
+POSTHOOK: query: DROP DATABASE vt
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:vt
+POSTHOOK: Output: database:vt