diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 3874862..344dc69 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -34,6 +34,7 @@ import org.antlr.runtime.tree.Tree;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.Context;
@@ -315,14 +316,21 @@ public abstract class BaseSemanticAnalyzer {
    * @return if DB name is give, db.tab is returned. Otherwise, tab.
    */
   public static String getUnescapedName(ASTNode tableNameNode) {
+    return getUnescapedName(tableNameNode, false);
+  }
+
+  public static String getUnescapedName(ASTNode tableNameNode, boolean prependDefaultDB) {
     if (tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME) {
       if (tableNameNode.getChildCount() == 2) {
         String dbName = unescapeIdentifier(tableNameNode.getChild(0).getText());
         String tableName = unescapeIdentifier(tableNameNode.getChild(1).getText());
         return dbName + "." + tableName;
-      } else {
-        return unescapeIdentifier(tableNameNode.getChild(0).getText());
       }
+      String tableName = unescapeIdentifier(tableNameNode.getChild(0).getText());
+      if (prependDefaultDB) {
+        return MetaStoreUtils.DEFAULT_DATABASE_NAME + "." + tableName;
+      }
+      return tableName;
     }
     return unescapeIdentifier(tableNameNode.getText());
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
index 4e2b88a..b38c002 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
@@ -132,12 +132,13 @@ public class QBParseInfo {
     }
   }
 
-  public void addInsertIntoTable(String table) {
-    insertIntoTables.add(table.toLowerCase());
+  public void addInsertIntoTable(String fullName) {
+    insertIntoTables.add(fullName.toLowerCase());
   }
 
-  public boolean isInsertIntoTable(String table) {
-    return insertIntoTables.contains(table);
+  public boolean isInsertIntoTable(String dbName, String table) {
+    String fullName = dbName + "." + table;
+    return insertIntoTables.contains(fullName.toLowerCase());
   }
 
   public HashMap<String, ASTNode> getAggregationExprsForClause(String clause) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 33ce6ca..5311213 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -699,8 +699,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       break;
 
     case HiveParser.TOK_INSERT_INTO:
-      String tab_name = getUnescapedName((ASTNode)ast.getChild(0).
-          getChild(0));
+      String tab_name = getUnescapedName((ASTNode)ast.getChild(0).getChild(0), true);
       qbp.addInsertIntoTable(tab_name);
 
     case HiveParser.TOK_DESTINATION:
@@ -925,7 +924,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       }
 
       // Disallow INSERT INTO on bucketized tables
-      if(qb.getParseInfo().isInsertIntoTable(tab_name) &&
+      if(qb.getParseInfo().isInsertIntoTable(tab.getDbName(), tab_name) &&
           tab.getNumBuckets() > 0) {
         throw new SemanticException(ErrorMsg.INSERT_INTO_BUCKETIZED_TABLE.
             getMsg("Table: " + tab_name));
@@ -4048,7 +4047,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       if (!isNonNativeTable) {
         ltd = new LoadTableDesc(queryTmpdir, ctx.getExternalTmpFileURI(dest_path.toUri()),
             table_desc, dpCtx);
-        ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(
+        ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
             dest_tab.getTableName()));
 
         if (holdDDLTime) {
@@ -4121,7 +4120,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
         ltd = new LoadTableDesc(queryTmpdir, ctx.getExternalTmpFileURI(dest_path.toUri()),
             table_desc, dest_part.getSpec());
-        ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(
+        ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
             dest_tab.getTableName()));
 
         if (holdDDLTime) {
diff --git ql/src/test/queries/clientpositive/insert1.q ql/src/test/queries/clientpositive/insert1.q
index 10fe4ee..a992e69 100644
--- ql/src/test/queries/clientpositive/insert1.q
+++ ql/src/test/queries/clientpositive/insert1.q
@@ -7,4 +7,15 @@ insert overwrite table insert1 select a.key, a.value from insert2 a WHERE (a.key
 explain insert into table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1);
 explain insert into table INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1);
 
+-- HIVE-3465
+create database x;
+create table x.insert1(key int, value string) stored as textfile;
+explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1);
+
+explain insert into table default.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1);
+
+explain
+from insert2
+insert into table insert1 select * where key < 10
+insert overwrite table x.insert1 select * where key > 10 and key < 20;
 
diff --git ql/src/test/results/clientpositive/insert1.q.out ql/src/test/results/clientpositive/insert1.q.out
index f147d5f..c047eaf 100644
--- ql/src/test/results/clientpositive/insert1.q.out
+++ ql/src/test/results/clientpositive/insert1.q.out
@@ -220,3 +220,407 @@ STAGE PLANS:
 #### A masked pattern was here ####
 
 
+PREHOOK: query: -- HIVE-3465
+create database x
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- HIVE-3465
+create database x
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: create table x.insert1(key int, value string) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table x.insert1(key int, value string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: x@insert1
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert2) a)) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME x INSERT1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) key) (- 1)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        a
+          TableScan
+            alias: a
+            Filter Operator
+              predicate:
+                  expr: (key = (- 1))
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: x.insert1
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: x.insert1
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: x.insert1
+
+  Stage: Stage-5
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: x.insert1
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+
+PREHOOK: query: explain insert into table default.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into table default.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert2) a)) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME default INSERT1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) key) (- 1)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        a
+          TableScan
+            alias: a
+            Filter Operator
+              predicate:
+                  expr: (key = (- 1))
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.insert1
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.insert1
+
+  Stage: Stage-5
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.insert1
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+
+PREHOOK: query: explain
+from insert2
+insert into table insert1 select * where key < 10
+insert overwrite table x.insert1 select * where key > 10 and key < 20
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from insert2
+insert into table insert1 select * where key < 10
+insert overwrite table x.insert1 select * where key > 10 and key < 20
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert1.key SIMPLE [(insert2)a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert1.value SIMPLE [(insert2)a.FieldSchema(name:value, type:string, comment:null), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert2))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 10))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME x insert1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 10) (< (TOK_TABLE_OR_COL key) 20)))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
+  Stage-5
+  Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-3 depends on stages: Stage-0
+  Stage-4
+  Stage-6
+  Stage-7 depends on stages: Stage-6
+  Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+  Stage-11
+  Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+  Stage-9 depends on stages: Stage-1
+  Stage-10
+  Stage-12
+  Stage-13 depends on stages: Stage-12
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert2
+          TableScan
+            alias: insert2
+            Filter Operator
+              predicate:
+                  expr: (key < 10)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.insert1
+            Filter Operator
+              predicate:
+                  expr: ((key > 10) and (key < 20))
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 2
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: x.insert1
+
+  Stage: Stage-8
+    Conditional Operator
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+  Stage: Stage-4
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.insert1
+
+  Stage: Stage-6
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.insert1
+
+  Stage: Stage-7
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-14
+    Conditional Operator
+
+  Stage: Stage-11
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: x.insert1
+
+  Stage: Stage-9
+    Stats-Aggr Operator
+
+  Stage: Stage-10
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: x.insert1
+
+  Stage: Stage-12
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: x.insert1
+
+  Stage: Stage-13
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+