diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index f008c4d..53be80e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -1008,7 +1008,6 @@ Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
     table.setMaterializedTable(true);
 
     LOG.info(cteName + " will be materialized into " + location);
-    cte.table = table;
     cte.source = analyzer;
 
     ctx.addMaterializedTable(cteName, table);
@@ -1567,8 +1566,7 @@ Operator getOptimizedHiveOPDag() throws SemanticException {
     }
 
     ASTNode selExprList = qb.getParseInfo().getSelForClause(dest);
-    RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, inputRR, qb,
-        selExprList);
+    RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, qb, selExprList);
 
     ArrayList<String> columnNames = new ArrayList<String>();
     Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 86b53cc..e79914d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -56,7 +56,6 @@ import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.commons.lang.StringUtils;
-import org.apache.curator.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -166,7 +165,6 @@ import org.apache.hadoop.hive.ql.optimizer.lineage.Generator;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec.SpecType;
-import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
 import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec;
@@ -268,7 +266,6 @@ import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
@@ -277,7 +274,6 @@ import com.google.common.base.Splitter;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import com.google.common.math.IntMath;
 import com.google.common.math.LongMath;
@@ -383,9 +379,6 @@ CreateTableDesc tableDesc;
 
-  /** Not thread-safe. */
-  final ASTSearcher astSearcher = new ASTSearcher();
-
   protected AnalyzeRewriteContext analyzeRewrite;
 
   private WriteEntity acidAnalyzeTable;
@@ -1153,15 +1146,6 @@ private String processTable(QB qb, ASTNode tabref) throws SemanticException {
     return this.nameToSplitSample;
   }
 
-  /**
-   * Convert a string to Text format and write its bytes in the same way TextOutputFormat would do.
-   * This is needed to properly encode non-ascii characters.
-   */
-  private static void writeAsText(String text, FSDataOutputStream out) throws IOException {
-    Text to = new Text(text);
-    out.write(to.getBytes(), 0, to.getLength());
-  }
-
   private void assertCombineInputFormat(Tree numerator, String message) throws SemanticException {
     String inputFormat = conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez") ?
         HiveConf.getVar(conf, HiveConf.ConfVars.HIVETEZINPUTFORMAT):
@@ -1320,7 +1304,6 @@ private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias)
     List<CTEClause> parents = new ArrayList<CTEClause>();
 
     // materialized
-    Table table;
     SemanticAnalyzer source;
 
     List<Task<? extends Serializable>> getTasks() {
@@ -1429,7 +1412,6 @@ Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
     table.setMaterializedTable(true);
 
     LOG.info(cteName + " will be materialized into " + location);
-    cte.table = table;
     cte.source = analyzer;
 
     ctx.addMaterializedTable(cteName, table);
@@ -1548,15 +1530,6 @@ private String processLateralView(QB qb, ASTNode lateralView)
     return alias;
   }
 
-  private String extractLateralViewAlias(ASTNode lateralView) {
-    // Lateral view AST has the following shape:
-    // ^(TOK_LATERAL_VIEW
-    //   ^(TOK_SELECT ^(TOK_SELEXPR ^(TOK_FUNCTION Identifier params) identifier* tableAlias)))
-    ASTNode selExpr = (ASTNode) lateralView.getChild(0).getChild(0);
-    ASTNode astTableAlias = (ASTNode) Iterables.getLast(selExpr.getChildren());
-    return astTableAlias.getChild(0).getText();
-  }
-
   /**
    * Phase 1: (including, but not limited to):
    *
@@ -1925,7 +1898,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan
   /**
   * This is phase1 of supporting specifying schema in insert statement
   * insert into foo(z,y) select a,b from bar;
-   * @see #handleInsertStatementSpec(java.util.List, String, RowResolver, RowResolver, QB, ASTNode)
+   * @see #handleInsertStatementSpec(java.util.List, String, RowResolver, QB, ASTNode)
   * @throws SemanticException
   */
  private void handleInsertStatementSpecPhase1(ASTNode ast, QBParseInfo qbp, Phase1Ctx ctx_1) throws SemanticException {
@@ -4612,7 +4585,7 @@ static boolean isRegex(String pattern, HiveConf conf) {
     }
     selectStar = selectStar && exprList.getChildCount() == posn + 1;
 
-    out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, qb, selExprList);
+    out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, qb, selExprList);
 
     ArrayList<String> columnNames = new ArrayList<String>();
     Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
@@ -4664,7 +4637,6 @@ private RowResolver getColForInsertStmtSpec(Map<String, ExprNodeDesc> targetCol2
       }
     }
 
-    boolean defaultConstraintsFetch = true;
     for (int i = 0; i < targetTableColNames.size(); i++) {
       String f = targetTableColNames.get(i);
       if(targetCol2Projection.containsKey(f)) {
@@ -4723,7 +4695,7 @@ private RowResolver getColForInsertStmtSpec(Map<String, ExprNodeDesc> targetCol2
   * @throws SemanticException
   */
  public RowResolver handleInsertStatementSpec(List<ExprNodeDesc> col_list, String dest,
-                                               RowResolver outputRR, RowResolver inputRR, QB qb,
+                                               RowResolver outputRR, QB qb,
                                                ASTNode selExprList) throws SemanticException {
     //(z,x)
     List<String> targetTableSchema = qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
@@ -4813,17 +4785,6 @@ boolean autogenColAliasPrfxIncludeFuncName() {
   }
 
   /**
-   * Convert exprNodeDesc array to Typeinfo array.
-   */
-  static ArrayList<TypeInfo> getTypeInfo(ArrayList<ExprNodeDesc> exprs) {
-    ArrayList<TypeInfo> result = new ArrayList<TypeInfo>();
-    for (ExprNodeDesc expr : exprs) {
-      result.add(expr.getTypeInfo());
-    }
-    return result;
-  }
-
-  /**
   * Convert exprNodeDesc array to ObjectInspector array.
   */
  static ArrayList<ObjectInspector> getWritableObjectInspector(ArrayList<ExprNodeDesc> exprs) {
@@ -4835,18 +4796,6 @@ boolean autogenColAliasPrfxIncludeFuncName() {
   }
 
   /**
-   * Convert exprNodeDesc array to Typeinfo array.
-   */
-  static ObjectInspector[] getStandardObjectInspector(ArrayList<TypeInfo> exprs) {
-    ObjectInspector[] result = new ObjectInspector[exprs.size()];
-    for (int i = 0; i < exprs.size(); i++) {
-      result[i] = TypeInfoUtils
-          .getStandardWritableObjectInspectorFromTypeInfo(exprs.get(i));
-    }
-    return result;
-  }
-
-  /**
   * Returns the GenericUDAFEvaluator for the aggregation. This is called once
   * for each GroupBy aggregation.
   */
@@ -5591,7 +5540,7 @@ private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb,
     List<String> outputKeyColumnNames = new ArrayList<String>();
     List<String> outputValueColumnNames = new ArrayList<String>();
 
-    ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
+    ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs,
         reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
         colExprMap);
 
@@ -5659,7 +5608,7 @@ private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb,
     return rsOp;
   }
 
-  private ArrayList<ExprNodeDesc> getReduceKeysForReduceSink(List<ASTNode> grpByExprs, String dest,
+  private ArrayList<ExprNodeDesc> getReduceKeysForReduceSink(List<ASTNode> grpByExprs,
       RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
      List<String> outputKeyColumnNames, Map<String, ExprNodeDesc> colExprMap)
      throws SemanticException {
@@ -5797,7 +5746,7 @@ private ReduceSinkOperator genCommonGroupByPlanReduceSinkOperator(QB qb, List
     List<String> outputValueColumnNames = new ArrayList<String>();
     List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
 
-    ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
+    ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs,
         reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
         colExprMap);
 
@@ -6059,7 +6008,6 @@ private Operator genGroupByPlanGroupByOperator2MR(QBParseInfo parseInfo,
       boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
       containsDistinctAggr = containsDistinctAggr || isDistinct;
-      boolean isStar = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
       Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
       GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators
           .get(entry.getKey());
@@ -6265,15 +6213,6 @@ private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operat
     return curr;
   }
 
-  static ArrayList<GenericUDAFEvaluator> getUDAFEvaluators(
-      ArrayList<AggregationDesc> aggs) {
-    ArrayList<GenericUDAFEvaluator> result = new ArrayList<GenericUDAFEvaluator>();
-    for (int i = 0; i < aggs.size(); i++) {
-      result.add(aggs.get(i).getGenericUDAFEvaluator());
-    }
-    return result;
-  }
-
  /**
   * Generate a Group-By plan using a 2 map-reduce jobs (5 operators will be
   * inserted):
@@ -11698,19 +11637,6 @@ public void init(boolean clearPartsCache) {
     this.qb = qb;
   }
 
-  boolean analyzeCreateTable(ASTNode child) throws SemanticException {
-    if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE) {
-      // if it is not CTAS, we don't need to go further and just return
-      if ((child = analyzeCreateTable(ast, qb, null)) == null) {
-        return true;
-      }
-    } else {
-      queryState.setCommandType(HiveOperation.QUERY);
-    }
-
-    return false;
-  }
-
   @Override
   @SuppressWarnings("nls")
   public void analyzeInternal(ASTNode ast) throws SemanticException {
@@ -11730,13 +11656,6 @@ public PlannerContext create() {
   * Planner specific stuff goes in here.
   */
  static class PlannerContext {
-    protected ASTNode child;
-    protected Phase1Ctx ctx_1;
-
-    void setParseTreeAttr(ASTNode child, Phase1Ctx ctx_1) {
-      this.child = child;
-      this.ctx_1 = ctx_1;
-    }
 
     void setCTASToken(ASTNode child) {
     }
@@ -12074,8 +11993,6 @@ else if(ast.getChild(0).getType() == HiveParser.TOK_FALSE) {
     getMetaData(qb, createVwDesc == null);
     LOG.info("Completed getting MetaData in Semantic Analysis");
 
-    plannerCtx.setParseTreeAttr(child, ctx_1);
-
     return true;
   }
 
@@ -13446,10 +13363,23 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt
 
     storageFormat.fillDefaultStorageFormat(false, isMaterialized);
 
-    if (ifNotExists && orReplace){
+    if (ifNotExists && orReplace) {
       throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE.");
     }
 
+    if (isMaterialized && !ifNotExists) {
+      // Verify that the table does not already exist
+      // dumpTable is only used to check the conflict for non-temporary tables
+      try {
+        Table dumpTable = db.newTable(dbDotTable);
+        if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) {
+          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTable));
+        }
+      } catch (HiveException e) {
+        throw new SemanticException(e);
+      }
+    }
+
     if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW &&
         ast.getChild(1).getType() == HiveParser.TOK_QUERY) {
       isAlterViewAs = true;
@@ -13482,10 +13412,6 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt
     return selectStmt;
   }
 
-  CreateViewDesc getCreateViewDesc() {
-    return this.createVwDesc;
-  }
-
   // validate the (materialized) view statement
   // check semantic conditions
   private void validateCreateView()
diff --git a/ql/src/test/queries/clientnegative/materialized_view_name_collusion.q b/ql/src/test/queries/clientnegative/materialized_view_name_collusion.q
new file mode 100644
index 0000000..567ac2a
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/materialized_view_name_collusion.q
@@ -0,0 +1,10 @@
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.enforce.bucketing=true;
+
+create table mvnc_basetable (a int, b varchar(256), c decimal(10,2));
+
+
+create materialized view mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable;
+
+create materialized view mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable;
diff --git a/ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out b/ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out
new file mode 100644
index 0000000..9a2cb3e
--- /dev/null
+++ b/ql/src/test/results/clientnegative/materialized_view_name_collusion.q.out
@@ -0,0 +1,19 @@
+PREHOOK: query: create table mvnc_basetable (a int, b varchar(256), c decimal(10,2))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mvnc_basetable
+POSTHOOK: query: create table mvnc_basetable (a int, b varchar(256), c decimal(10,2))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mvnc_basetable
+PREHOOK: query: create materialized view mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@mvnc_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mvnc_mat_view
+POSTHOOK: query: create materialized view mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@mvnc_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mvnc_mat_view
+FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Table already exists: default.mvnc_mat_view
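
For reference, the new existence check in analyzeCreateView is guarded by isMaterialized && !ifNotExists, so it only fires for a plain CREATE MATERIALIZED VIEW. A minimal SQL-level sketch of the expected behavior, reusing the objects from the q-file above; the IF NOT EXISTS case is an inference from the guard and is not exercised by the added test:

-- Fails at semantic analysis after this patch: the view name is already taken.
create materialized view mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable;

-- Inferred from the isMaterialized && !ifNotExists guard: with IF NOT EXISTS the
-- check is skipped, so no SemanticException is expected at analysis time.
create materialized view if not exists mvnc_mat_view disable rewrite as select a, b, c from mvnc_basetable;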