diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 14a538b..a386e0d 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1104,10 +1104,15 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal HIVEDEFAULTFILEFORMAT("hive.default.fileformat", "TextFile", new StringSet("TextFile", "SequenceFile", "RCfile", "ORC"), "Default file format for CREATE TABLE statement. Users can explicitly override it by CREATE TABLE ... STORED AS [FORMAT]"), HIVEDEFAULTMANAGEDFILEFORMAT("hive.default.fileformat.managed", "none", - new StringSet("none", "TextFile", "SequenceFile", "RCfile", "ORC"), - "Default file format for CREATE TABLE statement applied to managed tables only. External tables will be \n" + - "created with format specified by hive.default.fileformat. Leaving this null will result in using hive.default.fileformat \n" + - "for all tables."), + new StringSet("none", "TextFile", "SequenceFile", "RCfile", "ORC"), + "Default file format for CREATE TABLE statement applied to managed tables only. External tables will be \n" + + "created with format specified by hive.default.fileformat. Leaving this null will result in using hive.default.fileformat \n" + + "for all tables."), + HIVEMATERIALIZEDVIEWFILEFORMAT("hive.materializedview.fileformat", "ORC", + new StringSet("none", "TextFile", "SequenceFile", "RCfile", "ORC"), + "File format for CREATE MATERIALIZED VIEW statement."), + HIVEMATERIALIZEDVIEWSERDE("hive.materializedview.serde", + "org.apache.hadoop.hive.ql.io.orc.OrcSerde", "Serde used for materialized views"), HIVEQUERYRESULTFILEFORMAT("hive.query.result.fileformat", "SequenceFile", new StringSet("TextFile", "SequenceFile", "RCfile", "Llap"), "Default file format for storing result of the query."), HIVECHECKFILEFORMAT("hive.fileformat.check", true, "Whether to check file format or not when loading data files"), diff --git jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java index 9d73470..fa984f4 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java @@ -728,6 +728,8 @@ public static String toJdbcTableType(String hivetabletype) { return "VIEW"; } else if (hivetabletype.equals(TableType.EXTERNAL_TABLE.toString())) { return "EXTERNAL TABLE"; + } else if (hivetabletype.equals(TableType.MATERIALIZED_VIEW.toString())) { + return "MATERIALIZED VIEW"; } else { return hivetabletype; } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index f632542..41385f7 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -1548,6 +1548,13 @@ public static boolean isIndexTable(Table table) { return TableType.INDEX_TABLE.toString().equals(table.getTableType()); } + public static boolean isMaterializedViewTable(Table table) { + if (table == null) { + return false; + } + return TableType.MATERIALIZED_VIEW.toString().equals(table.getTableType()); + } + /** * Given a map of partition column names to values, this creates a filter * string that can be used to call the *byFilter methods diff --git metastore/src/java/org/apache/hadoop/hive/metastore/TableType.java 
metastore/src/java/org/apache/hadoop/hive/metastore/TableType.java index 56eeaa0..e9e16d7 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/TableType.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/TableType.java @@ -22,5 +22,5 @@ * Typesafe enum for types of tables described by the metastore. */ public enum TableType { - MANAGED_TABLE, EXTERNAL_TABLE, VIRTUAL_VIEW, INDEX_TABLE + MANAGED_TABLE, EXTERNAL_TABLE, VIRTUAL_VIEW, INDEX_TABLE, MATERIALIZED_VIEW } diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index 602b4fc..e98fd82 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -449,6 +449,10 @@ INVALID_PK_SYNTAX(10326, "Invalid Primary Key syntax"), ACID_NOT_ENOUGH_HISTORY(10327, "Not enough history available for ({0},{1}). " + "Oldest available base: {2}", true), + REPLACE_VIEW_WITH_MATERIALIZED(10400, "Attempt to replace view {0} with materialized view", true), + REPLACE_MATERIALIZED_WITH_VIEW(10401, "Attempt to replace materialized view {0} with view", true), + UPDATE_DELETE_VIEW(10402, "You cannot update or delete records in a view"), + MATERIALIZED_VIEW_DEF_EMPTY(10403, "Query for the materialized view rebuild could not be retrieved"), //========================== 20000 range starts here ========================// SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. " diff --git ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java index 3bc9432..650792b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java +++ ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java @@ -65,6 +65,8 @@ private boolean multiDestQuery; private boolean filterWithSubQuery; + // True if this statement creates or replaces a materialized view + private boolean isMaterializedView; public boolean isQuery() { return query; @@ -260,6 +262,19 @@ public boolean hasFilterWithSubQuery() { return this.filterWithSubQuery; } + /** + * True indicates this statement creates or replaces a materialized view, not that it is a query + * against a materialized view. 
+ * @return + */ + public boolean isMaterializedView() { + return isMaterializedView; + } + + public void setMaterializedView(boolean isMaterializedView) { + this.isMaterializedView = isMaterializedView; + } + public void clear() { query = false; analyzeCommand = false; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index a59b781..1556b30 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -3789,7 +3789,22 @@ private void dropTable(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveExc if (dropTbl.getIfExists()) { return; } - throw new HiveException("Cannot drop a view with DROP TABLE"); + if (dropTbl.getExpectMaterializedView()) { + throw new HiveException("Cannot drop a view with DROP MATERIALIZED VIEW"); + } else { + throw new HiveException("Cannot drop a view with DROP TABLE"); + } + } + } else if (tbl.isMaterializedView()) { + if (!dropTbl.getExpectMaterializedView()) { + if (dropTbl.getIfExists()) { + return; + } + if (dropTbl.getExpectView()) { + throw new HiveException("Cannot drop a materialized view with DROP VIEW"); + } else { + throw new HiveException("Cannot drop a materialized view with DROP TABLE"); + } } } else { if (dropTbl.getExpectView()) { @@ -3798,6 +3813,12 @@ private void dropTable(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveExc } throw new HiveException( "Cannot drop a base table with DROP VIEW"); + } else if (dropTbl.getExpectMaterializedView()) { + if (dropTbl.getIfExists()) { + return; + } + throw new HiveException( + "Cannot drop a base table with DROP MATERIALIZED VIEW"); } } } @@ -4035,7 +4056,8 @@ private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws Exceptio // Get the existing table Table oldtbl = db.getTable(crtTbl.getLikeTableName()); Table tbl; - if (oldtbl.getTableType() == TableType.VIRTUAL_VIEW) { + if (oldtbl.getTableType() == TableType.VIRTUAL_VIEW || + oldtbl.getTableType() == TableType.MATERIALIZED_VIEW) { String targetTableName = crtTbl.getTableName(); tbl=db.newTable(targetTableName); @@ -4181,39 +4203,50 @@ private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws Exceptio private int createView(Hive db, CreateViewDesc crtView) throws HiveException { Table oldview = db.getTable(crtView.getViewName(), false); if (crtView.getOrReplace() && oldview != null) { - // replace existing view - // remove the existing partition columns from the field schema - oldview.setViewOriginalText(crtView.getViewOriginalText()); - oldview.setViewExpandedText(crtView.getViewExpandedText()); - oldview.setFields(crtView.getSchema()); - if (crtView.getComment() != null) { - oldview.setProperty("comment", crtView.getComment()); - } - if (crtView.getTblProps() != null) { - oldview.getTTable().getParameters().putAll(crtView.getTblProps()); - } - oldview.setPartCols(crtView.getPartCols()); - if (crtView.getInputFormat() != null) { - oldview.setInputFormatClass(crtView.getInputFormat()); - } - if (crtView.getOutputFormat() != null) { - oldview.setOutputFormatClass(crtView.getOutputFormat()); - } - oldview.checkValidity(null); - try { - db.alterTable(crtView.getViewName(), oldview, null); - } catch (InvalidOperationException e) { - throw new HiveException(e); + if (!crtView.isMaterialized()) { + // replace existing view + // remove the existing partition columns from the field schema + oldview.setViewOriginalText(crtView.getViewOriginalText()); + 
oldview.setViewExpandedText(crtView.getViewExpandedText()); + oldview.setFields(crtView.getSchema()); + if (crtView.getComment() != null) { + oldview.setProperty("comment", crtView.getComment()); + } + if (crtView.getTblProps() != null) { + oldview.getTTable().getParameters().putAll(crtView.getTblProps()); + } + oldview.setPartCols(crtView.getPartCols()); + if (crtView.getInputFormat() != null) { + oldview.setInputFormatClass(crtView.getInputFormat()); + } + if (crtView.getOutputFormat() != null) { + oldview.setOutputFormatClass(crtView.getOutputFormat()); + } + oldview.checkValidity(null); + try { + db.alterTable(crtView.getViewName(), oldview, null); + } catch (InvalidOperationException e) { + throw new HiveException(e); + } + work.getOutputs().add(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK)); + } else { + // This is a replace, so we need an exclusive lock + work.getOutputs().add(new WriteEntity(oldview, WriteEntity.WriteType.DDL_EXCLUSIVE)); } - work.getOutputs().add(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK)); } else { // create new view Table tbl = db.newTable(crtView.getViewName()); - tbl.setTableType(TableType.VIRTUAL_VIEW); + if (crtView.isMaterialized()) { + tbl.setTableType(TableType.MATERIALIZED_VIEW); + } else { + tbl.setTableType(TableType.VIRTUAL_VIEW); + } tbl.setSerializationLib(null); tbl.clearSerDeInfo(); tbl.setViewOriginalText(crtView.getViewOriginalText()); - tbl.setViewExpandedText(crtView.getViewExpandedText()); + if (!crtView.isMaterialized()) { + tbl.setViewExpandedText(crtView.getViewExpandedText()); + } tbl.setFields(crtView.getSchema()); if (crtView.getComment() != null) { tbl.setProperty("comment", crtView.getComment()); @@ -4229,10 +4262,23 @@ private int createView(Hive db, CreateViewDesc crtView) throws HiveException { if (crtView.getInputFormat() != null) { tbl.setInputFormatClass(crtView.getInputFormat()); } + if (crtView.getOutputFormat() != null) { tbl.setOutputFormatClass(crtView.getOutputFormat()); } + if (crtView.isMaterialized()) { + if (crtView.getLocation() != null) { + tbl.setDataLocation(new Path(crtView.getLocation())); + } + // Short circuit the checks that the input format is valid, this is configured for all + // materialized views and doesn't change so we don't need to check it constantly. 
+ tbl.getSd().setInputFormat(crtView.getInputFormat()); + tbl.getSd().setOutputFormat(crtView.getOutputFormat()); + tbl.getSd().setSerdeInfo(new SerDeInfo(crtView.getSerde(), crtView.getSerde(), + crtView.getSerdeProps())); + } + db.createTable(tbl, crtView.getIfNotExists()); work.getOutputs().add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK)); } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index e0d35d3..ea90889 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -209,8 +209,11 @@ public void checkValidity(Configuration conf) throws HiveException { } if (isView()) { + assert (getViewOriginalText() != null); + assert (getViewExpandedText() != null); + } else if (isMaterializedView()) { assert(getViewOriginalText() != null); - assert(getViewExpandedText() != null); + assert(getViewExpandedText() == null); } else { assert(getViewOriginalText() == null); assert(getViewExpandedText() == null); @@ -824,6 +827,10 @@ public boolean isView() { return TableType.VIRTUAL_VIEW.equals(getTableType()); } + public boolean isMaterializedView() { + return TableType.MATERIALIZED_VIEW.equals(getTableType()); + } + /** * @return whether this table is actually an index table */ diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index db7aeef..205faff 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -995,7 +995,9 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit assert (ast.getToken().getType() == HiveParser.TOK_TAB || ast.getToken().getType() == HiveParser.TOK_TABLE_PARTITION || ast.getToken().getType() == HiveParser.TOK_TABTYPE - || ast.getToken().getType() == HiveParser.TOK_CREATETABLE); + || ast.getToken().getType() == HiveParser.TOK_CREATETABLE + || ast.getToken().getType() == HiveParser.TOK_CREATE_MATERIALIZED_VIEW + || ast.getToken().getType() == HiveParser.TOK_REBUILD_MATERIALIZED_VIEW); int childIndex = 0; numDynParts = 0; @@ -1007,7 +1009,9 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit tableName = conf.getVar(HiveConf.ConfVars.HIVETESTMODEPREFIX) + tableName; } - if (ast.getToken().getType() != HiveParser.TOK_CREATETABLE) { + if (ast.getToken().getType() != HiveParser.TOK_CREATETABLE && + ast.getToken().getType() != HiveParser.TOK_CREATE_MATERIALIZED_VIEW && + ast.getToken().getType() != HiveParser.TOK_REBUILD_MATERIALIZED_VIEW) { tableHandle = db.getTable(tableName); } } catch (InvalidTableException ite) { @@ -1019,7 +1023,9 @@ public TableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartit } // get partition metadata if partition specified - if (ast.getChildCount() == 2 && ast.getToken().getType() != HiveParser.TOK_CREATETABLE) { + if (ast.getChildCount() == 2 && ast.getToken().getType() != HiveParser.TOK_CREATETABLE && + ast.getToken().getType() != HiveParser.TOK_CREATE_MATERIALIZED_VIEW && + ast.getToken().getType() != HiveParser.TOK_REBUILD_MATERIALIZED_VIEW) { childIndex = 1; ASTNode partspec = (ASTNode) ast.getChild(1); partitions = new ArrayList(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 7c50155..e9aa542 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -266,7 +266,7 @@ Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticExcept // table, destination), so if the query is otherwise ok, it is as if we // did remove those and gave CBO the proper AST. That is kinda hacky. ASTNode queryForCbo = ast; - if (cboCtx.type == PreCboCtx.Type.CTAS) { + if (cboCtx.type == PreCboCtx.Type.CTAS_OR_MV) { queryForCbo = cboCtx.nodeOfInterest; // nodeOfInterest is the query } runCBO = canCBOHandleAst(queryForCbo, getQB(), cboCtx); @@ -275,6 +275,8 @@ Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticExcept if (runCBO) { disableJoinMerge = true; boolean reAnalyzeAST = false; + final boolean materializedView = getQB().isMaterializedView(); + final boolean rebuild = materializedView && createVwDesc.getOrReplace(); try { if (this.conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) { @@ -286,15 +288,39 @@ Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticExcept // 1. Gen Optimized AST ASTNode newAST = getOptimizedAST(); - // 1.1. Fix up the query for insert/ctas - newAST = fixUpCtasAndInsertAfterCbo(ast, newAST, cboCtx); + // 1.1. Fix up the query for insert/ctas/materialized views + if (!rebuild) { + // If it is not a MATERIALIZED VIEW...REBUILD + newAST = fixUpAfterCbo(ast, newAST, cboCtx); + } // 2. Regen OP plan from optimized AST init(false); - if (cboCtx.type == PreCboCtx.Type.CTAS) { - // Redo create-table analysis, because it's not part of doPhase1. - setAST(newAST); - newAST = reAnalyzeCtasAfterCbo(newAST); + if (cboCtx.type == PreCboCtx.Type.CTAS_OR_MV) { + // Redo create-table/view analysis, because it's not part of doPhase1. 
+ if (materializedView) { + if (rebuild) { + // Use the CREATE MATERIALIZED VIEW...REBUILD + setAST(ast); + reAnalyzeMaterializedViewAfterCbo(ast); + } else { + // Use the REWRITTEN AST + setAST(newAST); + newAST = reAnalyzeMaterializedViewAfterCbo(newAST); + // Store text of the ORIGINAL QUERY + String originalText = ctx.getTokenRewriteStream().toString( + cboCtx.nodeOfInterest.getTokenStartIndex(), + cboCtx.nodeOfInterest.getTokenStopIndex()); + createVwDesc.setViewOriginalText(originalText); + } + viewSelect = newAST; + viewsExpanded = new ArrayList<>(); + viewsExpanded.add(createVwDesc.getViewName()); + } else { + // CTAS + setAST(newAST); + newAST = reAnalyzeCTASAfterCbo(newAST); + } } Phase1Ctx ctx_1 = initPhase1Ctx(); if (!doPhase1(newAST, getQB(), ctx_1, null)) { @@ -393,12 +419,13 @@ boolean canCBOHandleAst(ASTNode ast, QB qb, PreCboCtx cboCtx) { int root = ast.getToken().getType(); boolean needToLogMessage = STATIC_LOG.isInfoEnabled(); boolean isSupportedRoot = root == HiveParser.TOK_QUERY || root == HiveParser.TOK_EXPLAIN - || qb.isCTAS(); + || qb.isCTAS() || qb.isMaterializedView(); // Queries without a source table currently are not supported by CBO boolean isSupportedType = (qb.getIsQuery() && !qb.containsQueryWithoutSourceTable()) - || qb.isCTAS() || cboCtx.type == PreCboCtx.Type.INSERT; + || qb.isCTAS() || qb.isMaterializedView() || cboCtx.type == PreCboCtx.Type.INSERT; boolean noBadTokens = HiveCalciteUtil.validateASTForUnsupportedTokens(ast); - boolean result = isSupportedRoot && isSupportedType && getCreateViewDesc() == null + boolean result = isSupportedRoot && isSupportedType + && (getCreateViewDesc() == null || getCreateViewDesc().isMaterialized()) && noBadTokens; if (!result) { @@ -411,7 +438,7 @@ boolean canCBOHandleAst(ASTNode ast, QB qb, PreCboCtx cboCtx) { msg += "is not a query with at least one source table " + " or there is a subquery without a source table, or CTAS, or insert; "; } - if (getCreateViewDesc() != null) { + if (getCreateViewDesc() != null && !getCreateViewDesc().isMaterialized()) { msg += "has create view; "; } if (!noBadTokens) { @@ -583,7 +610,7 @@ String fixCtasColumnName(String colName) { */ static class PreCboCtx extends PlannerContext { enum Type { - NONE, INSERT, CTAS, UNEXPECTED + NONE, INSERT, CTAS_OR_MV, UNEXPECTED } private ASTNode nodeOfInterest; @@ -601,8 +628,8 @@ private void set(Type type, ASTNode ast) { } @Override - void setCTASToken(ASTNode child) { - set(PreCboCtx.Type.CTAS, child); + void setCTASOrMVToken(ASTNode child) { + set(PreCboCtx.Type.CTAS_OR_MV, child); } @Override @@ -613,7 +640,7 @@ void setInsertToken(ASTNode ast, boolean isTmpFileDest) { } } - ASTNode fixUpCtasAndInsertAfterCbo(ASTNode originalAst, ASTNode newAst, PreCboCtx cboCtx) + ASTNode fixUpAfterCbo(ASTNode originalAst, ASTNode newAst, PreCboCtx cboCtx) throws SemanticException { switch (cboCtx.type) { @@ -621,7 +648,7 @@ ASTNode fixUpCtasAndInsertAfterCbo(ASTNode originalAst, ASTNode newAst, PreCboCt // nothing to do return newAst; - case CTAS: { + case CTAS_OR_MV: { // Patch the optimized query back into original CTAS AST, replacing the // original query. replaceASTChild(cboCtx.nodeOfInterest, newAst); @@ -646,7 +673,7 @@ ASTNode fixUpCtasAndInsertAfterCbo(ASTNode originalAst, ASTNode newAst, PreCboCt } } - ASTNode reAnalyzeCtasAfterCbo(ASTNode newAst) throws SemanticException { + ASTNode reAnalyzeCTASAfterCbo(ASTNode newAst) throws SemanticException { // analyzeCreateTable uses this.ast, but doPhase1 doesn't, so only reset it // here. 
newAst = analyzeCreateTable(newAst, getQB(), null); @@ -658,6 +685,18 @@ ASTNode reAnalyzeCtasAfterCbo(ASTNode newAst) throws SemanticException { return newAst; } + ASTNode reAnalyzeMaterializedViewAfterCbo(ASTNode newAst) throws SemanticException { + // analyzeCreateView uses this.ast, but doPhase1 doesn't, so only reset it + // here. + newAst = analyzeCreateView(newAst, getQB(), null); + if (newAst == null) { + LOG.error("analyzeCreateTable failed to initialize materialized view after CBO;" + " new ast is " + + getAST().dump()); + throw new SemanticException("analyzeCreateTable failed to initialize materialized view after CBO"); + } + return newAst; + } + /** * Performs breadth-first search of the AST for a nested set of tokens. Tokens * don't have to be each others' direct children, they can be separated by diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 9329e00..8b0db4a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -332,7 +332,7 @@ public void analyzeInternal(ASTNode input) throws SemanticException { break; } case HiveParser.TOK_DROPTABLE: - analyzeDropTable(ast, false); + analyzeDropTable(ast, null); break; case HiveParser.TOK_TRUNCATETABLE: analyzeTruncateTable(ast); @@ -407,7 +407,10 @@ public void analyzeInternal(ASTNode input) throws SemanticException { analyzeMetastoreCheck(ast); break; case HiveParser.TOK_DROPVIEW: - analyzeDropTable(ast, true); + analyzeDropTable(ast, TableType.VIRTUAL_VIEW); + break; + case HiveParser.TOK_DROP_MATERIALIZED_VIEW: + analyzeDropTable(ast, TableType.MATERIALIZED_VIEW); break; case HiveParser.TOK_ALTERVIEW: { String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); @@ -881,7 +884,7 @@ private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException { - private void analyzeDropTable(ASTNode ast, boolean expectView) + private void analyzeDropTable(ASTNode ast, TableType expectedType) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null); @@ -899,7 +902,7 @@ private void analyzeDropTable(ASTNode ast, boolean expectView) } boolean ifPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null); - DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView, ifExists, ifPurge, replicationSpec); + DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectedType, ifExists, ifPurge, replicationSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf)); } @@ -1160,7 +1163,7 @@ private void analyzeCreateIndex(ASTNode ast) throws SemanticException { } } - storageFormat.fillDefaultStorageFormat(false); + storageFormat.fillDefaultStorageFormat(false, false); if (indexTableName == null) { indexTableName = MetaStoreUtils.getIndexTableName(qTabName[0], qTabName[1], indexName); indexTableName = qTabName[0] + "." + indexTableName; // on same database with base table @@ -2736,7 +2739,8 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists); DropTableDesc dropTblDesc = - new DropTableDesc(getDotName(qualified), partSpecs, expectView, mustPurge, replicationSpec); + new DropTableDesc(getDotName(qualified), partSpecs, expectView ? 
TableType.VIRTUAL_VIEW : null, + mustPurge, replicationSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf)); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index a3fcaa0..167f7a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -127,7 +127,7 @@ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticExceptio } static void validateTable(org.apache.hadoop.hive.ql.metadata.Table table) throws SemanticException { - if (table.isView()) { + if (table.isView() || table.isMaterializedView()) { throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); } if (table.isNonNative()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index c411f5e..140811d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -250,6 +250,9 @@ TOK_ALTERVIEW_DROPPROPERTIES; TOK_ALTERVIEW_ADDPARTS; TOK_ALTERVIEW_DROPPARTS; TOK_ALTERVIEW_RENAME; +TOK_REBUILD_MATERIALIZED_VIEW; +TOK_CREATE_MATERIALIZED_VIEW; +TOK_DROP_MATERIALIZED_VIEW; TOK_VIEWPARTCOLS; TOK_EXPLAIN; TOK_EXPLAIN_SQ_REWRITE; @@ -783,7 +786,9 @@ ddlStatement | showStatement | metastoreCheck | createViewStatement + | createMaterializedViewStatement | dropViewStatement + | dropMaterializedViewStatement | createFunctionStatement | createMacroStatement | createIndexStatement @@ -1029,6 +1034,7 @@ alterStatement @after { popMsg(state); } : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix) | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix) + | KW_ALTER KW_MATERIALIZED KW_VIEW tableName KW_REBUILD -> ^(TOK_REBUILD_MATERIALIZED_VIEW tableName) | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix ; @@ -1777,6 +1783,25 @@ createViewStatement ) ; +createMaterializedViewStatement +@init { + pushMsg("create materialized view statement", state); +} +@after { popMsg(state); } + : KW_CREATE KW_MATERIALIZED KW_VIEW (ifNotExists)? name=tableName + tableComment? tableRowFormat? tableFileFormat? tableLocation? + tablePropertiesPrefixed? KW_AS selectStatementWithCTE + -> ^(TOK_CREATE_MATERIALIZED_VIEW $name + ifNotExists? + tableComment? + tableRowFormat? + tableFileFormat? + tableLocation? + tablePropertiesPrefixed? + selectStatementWithCTE + ) + ; + viewPartition @init { pushMsg("view partition specification", state); } @after { popMsg(state); } @@ -1790,6 +1815,12 @@ dropViewStatement : KW_DROP KW_VIEW ifExists? viewName -> ^(TOK_DROPVIEW viewName ifExists?) ; +dropMaterializedViewStatement +@init { pushMsg("drop materialized view statement", state); } +@after { popMsg(state); } + : KW_DROP KW_MATERIALIZED KW_VIEW ifExists? viewName -> ^(TOK_DROP_MATERIALIZED_VIEW viewName ifExists?) 
+ ; + showFunctionIdentifier @init { pushMsg("identifier for show function statement", state); } @after { popMsg(state); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index d562ddf..9986fcf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -345,7 +345,7 @@ private CreateTableDesc getBaseCreateTableDescFromTable(String dbName, return TaskFactory.get(new DDLWork( getInputs(), getOutputs(), - new DropTableDesc(table.getTableName(), false, true, true, null) + new DropTableDesc(table.getTableName(), null, true, true, null) ), conf); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java index a49b813..a7005f1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java @@ -199,7 +199,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { // initialize destination table/partition TableSpec ts = new TableSpec(db, conf, (ASTNode) tableTree); - if (ts.tableHandle.isView()) { + if (ts.tableHandle.isView() || ts.tableHandle.isMaterializedView()) { throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); } if (ts.tableHandle.isNonNative()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java index b2125ca..0e26463 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext; import org.apache.hadoop.hive.ql.plan.CreateTableDesc; +import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc.SampleDesc; @@ -113,6 +114,7 @@ private AnalyzeRewriteContext analyzeRewrite; private CreateTableDesc createTableDesc; + private CreateViewDesc createViewDesc; private boolean reduceSinkAddedBySortedDynPartition; private Map viewProjectToViewSchema; @@ -126,20 +128,14 @@ public ParseContext() { /** * @param conf - * @param qb - * current QB - * @param ast - * current parse tree * @param opToPartPruner * map from table scan operator to partition pruner * @param opToPartList * @param topOps * list of operators for the top query - * @param opParseCtx - * operator parse context - contains a mapping from operator to - * operator parse state (row resolver etc.) 
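For reference, the grammar additions above (createMaterializedViewStatement, dropMaterializedViewStatement, and the new ALTER ... REBUILD alternative) introduce three statements. The following is a minimal JDBC sketch of how they might be exercised end to end; the connection URL and the mv_src_sum and src names are hypothetical and only illustrate the syntax this patch parses.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class MaterializedViewSyntaxSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical HiveServer2 endpoint and source table; adjust for a real cluster.
    try (Connection conn = DriverManager.getConnection(
             "jdbc:hive2://localhost:10000/default", "hive", "");
         Statement stmt = conn.createStatement()) {
      // createMaterializedViewStatement: CREATE MATERIALIZED VIEW ... AS <select>
      stmt.execute("CREATE MATERIALIZED VIEW mv_src_sum AS "
          + "SELECT key, count(*) AS cnt FROM src GROUP BY key");
      // alterStatement alternative: ALTER MATERIALIZED VIEW ... REBUILD (TOK_REBUILD_MATERIALIZED_VIEW)
      stmt.execute("ALTER MATERIALIZED VIEW mv_src_sum REBUILD");
      // dropMaterializedViewStatement: DROP MATERIALIZED VIEW; per the DDLTask changes,
      // DROP TABLE or DROP VIEW against the same object would be rejected.
      stmt.execute("DROP MATERIALIZED VIEW mv_src_sum");
    }
  }
}
```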
* @param joinOps * context needed join processing (map join specifically) + * @param smbMapJoinOps * @param loadTableWork * list of destination tables being loaded * @param loadFileWork @@ -151,13 +147,19 @@ public ParseContext() { * @param destTableId * @param listMapJoinOpsNoReducer * list of map join operators with no reducer - * @param groupOpToInputTables * @param prunedPartitions * @param opToSamplePruner * operator to sample pruner map * @param globalLimitCtx * @param nameToSplitSample * @param rootTasks + * @param opToPartToSkewedPruner + * @param viewAliasToInput + * @param reduceSinkOperatorsAddedByEnforceBucketingSorting + * @param analyzeRewrite + * @param createTableDesc + * @param createViewDesc + * @param queryProperties */ public ParseContext( QueryState queryState, @@ -180,8 +182,8 @@ public ParseContext( Map viewAliasToInput, List reduceSinkOperatorsAddedByEnforceBucketingSorting, AnalyzeRewriteContext analyzeRewrite, CreateTableDesc createTableDesc, - QueryProperties queryProperties, Map viewProjectToTableSchema, - Set acidFileSinks) { + CreateViewDesc createViewDesc, QueryProperties queryProperties, + Map viewProjectToTableSchema, Set acidFileSinks) { this.queryState = queryState; this.conf = queryState.getConf(); this.opToPartPruner = opToPartPruner; @@ -210,6 +212,7 @@ public ParseContext( reduceSinkOperatorsAddedByEnforceBucketingSorting; this.analyzeRewrite = analyzeRewrite; this.createTableDesc = createTableDesc; + this.createViewDesc = createViewDesc; this.queryProperties = queryProperties; this.viewProjectToViewSchema = viewProjectToTableSchema; this.needViewColumnAuthorization = viewProjectToTableSchema != null @@ -579,6 +582,10 @@ public void setCreateTable(CreateTableDesc createTableDesc) { this.createTableDesc = createTableDesc; } + public CreateViewDesc getCreateViewDesc() { + return createViewDesc; + } + public void setReduceSinkAddedBySortedDynPartition( final boolean reduceSinkAddedBySortedDynPartition) { this.reduceSinkAddedBySortedDynPartition = reduceSinkAddedBySortedDynPartition; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java index 7e732f3..59d537f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java @@ -32,6 +32,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.CreateTableDesc; +import org.apache.hadoop.hive.ql.plan.CreateViewDesc; /** * Implementation of the query block. @@ -63,6 +64,9 @@ private boolean insideView; private Set aliasInsideView; + // If this is a materialized view, this stores the view descriptor + private CreateViewDesc viewDesc; + // used by PTFs /* * This map maintains the PTFInvocationSpec for each PTF chain invocation in this QB. 
@@ -404,6 +408,18 @@ public QBSubQuery getHavingClauseSubQueryPredicate() { return havingClauseSubQueryPredicate; } + public CreateViewDesc getViewDesc() { + return viewDesc; + } + + public void setViewDesc(CreateViewDesc viewDesc) { + this.viewDesc = viewDesc; + } + + public boolean isMaterializedView() { + return viewDesc != null && viewDesc.isMaterialized(); + } + void addEncryptedTargetTablePath(Path p) { if(encryptedTargetTablePaths == null) { encryptedTargetTablePaths = new ArrayList<>(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 66589fe..7c3094e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -56,7 +56,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hive.common.BlobStorageUtils; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.StatsSetupConst; @@ -155,6 +154,7 @@ import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec; import org.apache.hadoop.hive.ql.plan.AggregationDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.CreateTableDesc; import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc; @@ -284,9 +284,9 @@ Map> groupOpToInputTables; Map prunedPartitions; protected List resultSchema; - private CreateViewDesc createVwDesc; - private ArrayList viewsExpanded; - private ASTNode viewSelect; + protected CreateViewDesc createVwDesc; + protected ArrayList viewsExpanded; + protected ASTNode viewSelect; protected final UnparseTranslator unparseTranslator; private final GlobalLimitCtx globalLimitCtx; @@ -468,7 +468,7 @@ public ParseContext getParseContext() { listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject, opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToPartToSkewedPruner, viewAliasToInput, reduceSinkOperatorsAddedByEnforceBucketingSorting, - analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema, acidFileSinks); + analyzeRewrite, tableDesc, createVwDesc, queryProperties, viewProjectToTableSchema, acidFileSinks); } public CompilationOpContext getOpContext() { @@ -2057,7 +2057,7 @@ private void getMetaData(QB qb, ReadEntity parentInput) switch (ast.getToken().getType()) { case HiveParser.TOK_TAB: { TableSpec ts = new TableSpec(db, conf, ast); - if (ts.tableHandle.isView()) { + if (ts.tableHandle.isView() || ts.tableHandle.isMaterializedView()) { throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); } @@ -2095,7 +2095,7 @@ private void getMetaData(QB qb, ReadEntity parentInput) if ((!qb.getParseInfo().getIsSubQ()) && (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) { - if (qb.isCTAS()) { + if (qb.isCTAS() || qb.isMaterializedView()) { qb.setIsQuery(false); ctx.setResDir(null); ctx.setResFile(null); @@ -4400,12 +4400,10 @@ public static GenericUDAFEvaluator getGenericUDAFEvaluator(String aggName, /** * Returns the GenericUDAFInfo struct for the aggregation. * - * @param aggName - * The name of the UDAF. 
+ * @param evaluator + * @param emode * @param aggParameters * The exprNodeDesc of the original parameters - * @param aggTree - * The ASTNode node of the UDAF in the query. * @return GenericUDAFInfo * @throws SemanticException * when the UDAF is not found or has problems. @@ -4711,6 +4709,9 @@ private void processGroupingSetReduceSinkOperator(RowResolver reduceSinkInputRow * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)). * The new GroupByOperator will be a child of the reduceSinkOperatorInfo. * + * @param parseInfo + * @param dest + * @param reduceSinkOperatorInfo * @param mode * The mode of the aggregation (MERGEPARTIAL, PARTIAL2) * @param genericUDAFEvaluators @@ -4720,7 +4721,7 @@ private void processGroupingSetReduceSinkOperator(RowResolver reduceSinkInputRow * list of grouping sets * @param groupingSetsPresent * whether grouping sets are present in this query - * @param groupingSetsConsumedCurrentMR + * @param groupingSetsNeedAdditionalMRJob * whether grouping sets are consumed by this group by * @return the new GroupByOperator */ @@ -6792,10 +6793,14 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) // table command rather than taking the default value List field_schemas = null; CreateTableDesc tblDesc = qb.getTableDesc(); + CreateViewDesc viewDesc = qb.getViewDesc(); if (tblDesc != null) { field_schemas = new ArrayList(); destTableIsTemporary = tblDesc.isTemporary(); destTableIsMaterialization = tblDesc.isMaterialization(); + } else if (viewDesc != null) { + field_schemas = new ArrayList(); + destTableIsTemporary = false; } boolean first = true; @@ -6854,6 +6859,8 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) // update the create table descriptor with the resulting schema. if (tblDesc != null) { tblDesc.setCols(new ArrayList(field_schemas)); + } else if (viewDesc != null) { + viewDesc.setSchema(new ArrayList(field_schemas)); } boolean isDestTempFile = true; @@ -6865,11 +6872,13 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) } boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE); - loadFileWork.add(new LoadFileDesc(tblDesc, queryTmpdir, dest_path, isDfsDir, cols, + loadFileWork.add(new LoadFileDesc(tblDesc, viewDesc, queryTmpdir, dest_path, isDfsDir, cols, colTypes)); if (tblDesc == null) { - if (qb.getIsQuery()) { + if (viewDesc != null) { + table_desc = PlanUtils.getTableDesc(viewDesc, cols, colTypes); + } else if (qb.getIsQuery()) { String fileFormat; if (SessionState.get().isHiveServerQuery() && conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS)) { @@ -10419,7 +10428,8 @@ private Operator genLateralViewPlan(QB qb, Operator op, ASTNode lateralViewTree) * * @param source * @param dest - * @param outputColNames + * @param colExprMap + * @param outputInternalColNames * - a list to which the new internal column names will be added, in * the same order as in the dest row resolver */ @@ -10489,7 +10499,7 @@ void setParseTreeAttr(ASTNode child, Phase1Ctx ctx_1) { this.ctx_1 = ctx_1; } - void setCTASToken(ASTNode child) { + void setCTASOrMVToken(ASTNode child) { } void setInsertToken(ASTNode ast, boolean isTmpFileDest) { @@ -10672,10 +10682,12 @@ boolean genResolvedParseTree(ASTNode ast, PlannerContext plannerCtx) throws Sema } // 3. 
analyze create view command - if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW - || (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) { - child = analyzeCreateView(ast, qb); - queryState.setCommandType(HiveOperation.CREATEVIEW); + if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW || + ast.getToken().getType() == HiveParser.TOK_CREATE_MATERIALIZED_VIEW || + ast.getToken().getType() == HiveParser.TOK_REBUILD_MATERIALIZED_VIEW || + (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && + ast.getChild(1).getType() == HiveParser.TOK_QUERY)) { + child = analyzeCreateView(ast, qb, plannerCtx); if (child == null) { return false; } @@ -10820,7 +10832,7 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject, opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToPartToSkewedPruner, viewAliasToInput, reduceSinkOperatorsAddedByEnforceBucketingSorting, - analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema, acidFileSinks); + analyzeRewrite, tableDesc, createVwDesc, queryProperties, viewProjectToTableSchema, acidFileSinks); // 5. Take care of view creation if (createVwDesc != null) { @@ -10828,32 +10840,34 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce // validate the create view statement at this point, the createVwDesc gets // all the information for semanticcheck - validateCreateView(createVwDesc); + validateCreateView(); - // Since we're only creating a view (not executing it), we don't need to - // optimize or translate the plan (and in fact, those procedures can - // interfere with the view creation). So skip the rest of this method. - ctx.setResDir(null); - ctx.setResFile(null); + if (!createVwDesc.isMaterialized()) { + // Since we're only creating a view (not executing it), we don't need to + // optimize or translate the plan (and in fact, those procedures can + // interfere with the view creation). So skip the rest of this method. + ctx.setResDir(null); + ctx.setResFile(null); - try { - PlanUtils.addInputsForView(pCtx); - } catch (HiveException e) { - throw new SemanticException(e); - } + try { + PlanUtils.addInputsForView(pCtx); + } catch (HiveException e) { + throw new SemanticException(e); + } - // Generate lineage info for create view statements - // if LineageLogger hook is configured. - if (HiveConf.getVar(conf, HiveConf.ConfVars.POSTEXECHOOKS).contains( - "org.apache.hadoop.hive.ql.hooks.LineageLogger")) { - ArrayList transformations = new ArrayList(); - transformations.add(new HiveOpConverterPostProc()); - transformations.add(new Generator()); - for (Transform t : transformations) { - pCtx = t.transform(pCtx); + // Generate lineage info for create view statements + // if LineageLogger hook is configured. + if (HiveConf.getVar(conf, HiveConf.ConfVars.POSTEXECHOOKS).contains( + "org.apache.hadoop.hive.ql.hooks.LineageLogger")) { + ArrayList transformations = new ArrayList(); + transformations.add(new HiveOpConverterPostProc()); + transformations.add(new Generator()); + for (Transform t : transformations) { + pCtx = t.transform(pCtx); + } } + return; } - return; } // 6. 
Generate table access stats if required @@ -10984,6 +10998,11 @@ private void enforceScanLimits(ParseContext pCtx, FetchTask fTask) private void saveViewDefinition() throws SemanticException { + if (createVwDesc.isMaterialized() && createVwDesc.getOrReplace()) { + // This is a rebuild, there's nothing to do here. + return; + } + // Make a copy of the statement's result schema, since we may // modify it below as part of imposing view column names. List derivedSchema = @@ -11002,9 +11021,11 @@ private void saveViewDefinition() throws SemanticException { } // Preserve the original view definition as specified by the user. - String originalText = ctx.getTokenRewriteStream().toString( - viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex()); - createVwDesc.setViewOriginalText(originalText); + if (createVwDesc.getViewOriginalText() == null) { + String originalText = ctx.getTokenRewriteStream().toString( + viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex()); + createVwDesc.setViewOriginalText(originalText); + } // Now expand the view definition with extras such as explicit column // references; this expanded form is what we'll re-parse when the view is @@ -11094,7 +11115,10 @@ private void saveViewDefinition() throws SemanticException { } createVwDesc.setSchema(derivedSchema); - createVwDesc.setViewExpandedText(expandedText); + if (!createVwDesc.isMaterialized()) { + // materialized views don't store the expanded text as they won't be rewritten at query time. + createVwDesc.setViewExpandedText(expandedText); + } } static List convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException { @@ -11478,8 +11502,7 @@ ASTNode analyzeCreateTable( RowFormatParams rowFormatParams = new RowFormatParams(); StorageFormat storageFormat = new StorageFormat(conf); - LOG.info("Creating table " + dbDotTab + " position=" - + ast.getCharPositionInLine()); + LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); /* @@ -11521,6 +11544,7 @@ ASTNode analyzeCreateTable( command_type = CTLT; } break; + case HiveParser.TOK_QUERY: // CTAS if (command_type == CTLT) { throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg()); @@ -11542,7 +11566,7 @@ ASTNode analyzeCreateTable( } command_type = CTAS; if (plannerCtx != null) { - plannerCtx.setCTASToken(child); + plannerCtx.setCTASOrMVToken(child); } selectStmt = child; break; @@ -11603,7 +11627,7 @@ ASTNode analyzeCreateTable( } } - storageFormat.fillDefaultStorageFormat(isExt); + storageFormat.fillDefaultStorageFormat(isExt, false); if ((command_type == CTAS) && (storageFormat.getStorageHandler() != null)) { throw new SemanticException(ErrorMsg.CREATE_NON_NATIVE_AS.getMsg()); @@ -11760,6 +11784,7 @@ ASTNode analyzeCreateTable( queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT); return selectStmt; + default: throw new SemanticException("Unrecognized command."); } @@ -11775,8 +11800,7 @@ private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type) thr outputs.add(new WriteEntity(t, WriteEntity.WriteType.DDL_NO_LOCK)); } - private ASTNode analyzeCreateView(ASTNode ast, QB qb) - throws SemanticException { + protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException { String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); String dbDotTable = getDotName(qualTabName); List cols = null; @@ -11787,12 +11811,21 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb) ASTNode selectStmt = 
null; Map tblProps = null; List partColNames = null; + boolean isRebuild = ast.getToken().getType() == HiveParser.TOK_REBUILD_MATERIALIZED_VIEW; + boolean isMaterialized = ast.getToken().getType() == HiveParser.TOK_CREATE_MATERIALIZED_VIEW + || isRebuild; + String location = null; + RowFormatParams rowFormatParams = new RowFormatParams(); + StorageFormat storageFormat = new StorageFormat(conf); LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine()); int numCh = ast.getChildCount(); for (int num = 1; num < numCh; num++) { ASTNode child = (ASTNode) ast.getChild(num); + if (storageFormat.fillStorageFormat(child)) { + continue; + } switch (child.getToken().getType()) { case HiveParser.TOK_IFNOTEXISTS: ifNotExists = true; @@ -11801,6 +11834,10 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb) orReplace = true; break; case HiveParser.TOK_QUERY: + // For CBO + if (plannerCtx != null) { + plannerCtx.setCTASOrMVToken(child); + } selectStmt = child; break; case HiveParser.TOK_TABCOLNAME: @@ -11815,11 +11852,29 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb) case HiveParser.TOK_VIEWPARTCOLS: partColNames = getColumnNames((ASTNode) child.getChild(0)); break; + case HiveParser.TOK_TABLEROWFORMAT: + rowFormatParams.analyzeRowFormat(child); + break; + case HiveParser.TOK_TABLELOCATION: + location = unescapeSQLString(child.getChild(0).getText()); + location = EximUtil.relativeToAbsolutePath(conf, location); + inputs.add(toReadEntity(location)); + break; + case HiveParser.TOK_TABLESERIALIZER: + child = (ASTNode) child.getChild(0); + storageFormat.setSerde(unescapeSQLString(child.getChild(0).getText())); + if (child.getChildCount() == 2) { + readProps((ASTNode) (child.getChild(1).getChild(0)), + storageFormat.getSerdeProps()); + } + break; default: assert false; } } + storageFormat.fillDefaultStorageFormat(false, isMaterialized); + if (ifNotExists && orReplace){ throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE."); } @@ -11830,18 +11885,48 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb) orReplace = true; } - StorageFormat defaultFmt = new StorageFormat(conf); - defaultFmt.fillDefaultStorageFormat(false); - createVwDesc = new CreateViewDesc( - dbDotTable, cols, comment, defaultFmt.getInputFormat(), - defaultFmt.getOutputFormat(), tblProps, partColNames, - ifNotExists, orReplace, isAlterViewAs); - unparseTranslator.enable(); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - createVwDesc), conf)); - addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW); + if (isMaterialized) { + createVwDesc = new CreateViewDesc( + dbDotTable, cols, comment, tblProps, partColNames, + ifNotExists, orReplace || isRebuild, isAlterViewAs, storageFormat.getInputFormat(), + storageFormat.getOutputFormat(), location, storageFormat.getSerde(), + storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); + addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW); + queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); + qb.setViewDesc(createVwDesc); + } else { + createVwDesc = new CreateViewDesc( + dbDotTable, cols, comment, tblProps, partColNames, + ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), + storageFormat.getOutputFormat(), storageFormat.getSerde()); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + createVwDesc), conf)); + addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW); + queryState.setCommandType(HiveOperation.CREATEVIEW); + } + + if 
(isRebuild) { + // We need to go lookup the table and get the select statement and then parse it. + try { + Table tab = db.getTable(qualTabName[0], qualTabName[1]); + String viewText = tab.getViewOriginalText(); + if (viewText.trim().isEmpty()) { + throw new SemanticException(ErrorMsg.MATERIALIZED_VIEW_DEF_EMPTY); + } + ParseDriver pd = new ParseDriver(); + ASTNode tree = pd.parse(viewText, ctx, false); + selectStmt = ParseUtils.findRootNonNullToken(tree); + // For CBO + if (plannerCtx != null) { + plannerCtx.setCTASOrMVToken(selectStmt); + } + } catch (Exception e) { + throw new SemanticException(e); + } + } + return selectStmt; } @@ -11852,7 +11937,7 @@ CreateViewDesc getCreateViewDesc() { // validate the create view statement // the statement could be CREATE VIEW, REPLACE VIEW, or ALTER VIEW AS SELECT // check semantic conditions - private void validateCreateView(CreateViewDesc createVwDesc) + private void validateCreateView() throws SemanticException { try { Table oldView = getTable(createVwDesc.getViewName(), false); @@ -11881,8 +11966,21 @@ private void validateCreateView(CreateViewDesc createVwDesc) //replace view if (createVwDesc.getOrReplace() && oldView != null) { + // Don't allow swapping between virtual and materialized view in replace + if (oldView.getTableType().equals(TableType.VIRTUAL_VIEW) && createVwDesc.isMaterialized()) { + throw new SemanticException(ErrorMsg.REPLACE_VIEW_WITH_MATERIALIZED, + oldView.getTableName()); + } + + if (oldView.getTableType().equals(TableType.MATERIALIZED_VIEW) && + !createVwDesc.isMaterialized()) { + throw new SemanticException(ErrorMsg.REPLACE_MATERIALIZED_WITH_VIEW, + oldView.getTableName()); + } + // Existing table is not a view - if (!oldView.getTableType().equals(TableType.VIRTUAL_VIEW)) { + if (!oldView.getTableType().equals(TableType.VIRTUAL_VIEW) && + !oldView.getTableType().equals(TableType.MATERIALIZED_VIEW)) { String tableNotViewErrorMsg = "The following is an existing table, not a view: " + createVwDesc.getViewName(); @@ -11890,26 +11988,28 @@ private void validateCreateView(CreateViewDesc createVwDesc) ErrorMsg.EXISTING_TABLE_IS_NOT_VIEW.getMsg(tableNotViewErrorMsg)); } - // if old view has partitions, it could not be replaced - String partitionViewErrorMsg = - "The following view has partition, it could not be replaced: " + - createVwDesc.getViewName(); - try { - if ((createVwDesc.getPartCols() == null || - createVwDesc.getPartCols().isEmpty() || - !createVwDesc.getPartCols().equals(oldView.getPartCols())) && - !oldView.getPartCols().isEmpty() && - !db.getPartitions(oldView).isEmpty()) { + if (!createVwDesc.isMaterialized()) { + // if old view has partitions, it could not be replaced + String partitionViewErrorMsg = + "The following view has partition, it could not be replaced: " + + createVwDesc.getViewName(); + try { + if ((createVwDesc.getPartCols() == null || + createVwDesc.getPartCols().isEmpty() || + !createVwDesc.getPartCols().equals(oldView.getPartCols())) && + !oldView.getPartCols().isEmpty() && + !db.getPartitions(oldView).isEmpty()) { + throw new SemanticException( + ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg)); + } + } catch (HiveException e) { throw new SemanticException( - ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg)); + ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg)); } - } catch (HiveException e) { - throw new SemanticException( - ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg)); } } } catch (HiveException e) { - throw new 
SemanticException(e.getMessage()); + throw new SemanticException(e.getMessage(), e); } } @@ -13018,6 +13118,7 @@ private void copyInfoToQueryProperties(QueryProperties queryProperties) { queryProperties.setHasOuterOrderBy(!qb.getParseInfo().getIsSubQ() && !qb.getParseInfo().getDestToOrderBy().isEmpty()); queryProperties.setOuterQueryLimit(qb.getParseInfo().getOuterQueryLimit()); + queryProperties.setMaterializedView(qb.getViewDesc() != null); } } private void warn(String msg) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index cc731bf..18439f9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -84,7 +84,9 @@ commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO); commandType.put(HiveParser.TOK_DROPMACRO, HiveOperation.DROPMACRO); commandType.put(HiveParser.TOK_CREATEVIEW, HiveOperation.CREATEVIEW); + commandType.put(HiveParser.TOK_CREATE_MATERIALIZED_VIEW, HiveOperation.CREATE_MATERIALIZED_VIEW); commandType.put(HiveParser.TOK_DROPVIEW, HiveOperation.DROPVIEW); + commandType.put(HiveParser.TOK_DROP_MATERIALIZED_VIEW, HiveOperation.DROP_MATERIALIZED_VIEW); commandType.put(HiveParser.TOK_CREATEINDEX, HiveOperation.CREATEINDEX); commandType.put(HiveParser.TOK_DROPINDEX, HiveOperation.DROPINDEX); commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, HiveOperation.ALTERINDEX_REBUILD); @@ -231,6 +233,7 @@ public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DROPVIEW: + case HiveParser.TOK_DROP_MATERIALIZED_VIEW: case HiveParser.TOK_DESCDATABASE: case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_DESCFUNCTION: diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java index 48aca4d..d3b955c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/StorageFormat.java @@ -104,13 +104,22 @@ protected void processStorageFormat(String name) throws SemanticException { } } - protected void fillDefaultStorageFormat(boolean isExternal) throws SemanticException { + protected void fillDefaultStorageFormat(boolean isExternal, boolean isMaterializedView) + throws SemanticException { if ((inputFormat == null) && (storageHandler == null)) { - String defaultFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT); - String defaultManagedFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTMANAGEDFILEFORMAT); + String defaultFormat; + String defaultManagedFormat; + if (isMaterializedView) { + defaultFormat = defaultManagedFormat = + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMATERIALIZEDVIEWFILEFORMAT); + serde = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMATERIALIZEDVIEWSERDE); + } else { + defaultFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT); + defaultManagedFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEDEFAULTMANAGEDFILEFORMAT); + } if (!isExternal && !"none".equals(defaultManagedFormat)) { - defaultFormat = defaultManagedFormat; + defaultFormat = defaultManagedFormat; } if (StringUtils.isBlank(defaultFormat)) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 114fa2f..ed3be33 
100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -56,6 +56,7 @@ import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc; import org.apache.hadoop.hive.ql.plan.ColumnStatsWork; import org.apache.hadoop.hive.ql.plan.CreateTableDesc; +import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.LoadFileDesc; @@ -73,8 +74,6 @@ import org.apache.hadoop.hive.serde2.thrift.ThriftFormatter; import org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe; -import akka.util.Collections; - import com.google.common.collect.Interner; import com.google.common.collect.Interners; @@ -222,18 +221,24 @@ public void compile(final ParseContext pCtx, final List outIter = outputs.iterator(); - while (outIter.hasNext()) { - switch (outIter.next().getType()) { - case DFS_DIR: - case LOCAL_DIR: - outIter.remove(); - break; - default: - break; - } - } Task crtTblTask = TaskFactory.get(new DDLWork( inputs, outputs, crtTblDesc), conf); - - // find all leaf tasks and make the DDLTask as a dependent task of all of - // them - HashSet> leaves = new LinkedHashSet>(); - getLeafTasks(rootTasks, leaves); - assert (leaves.size() > 0); - for (Task task : leaves) { - if (task instanceof StatsTask) { - // StatsTask require table to already exist - for (Task parentOfStatsTask : task.getParentTasks()) { - parentOfStatsTask.addDependentTask(crtTblTask); - } - for (Task parentOfCrtTblTask : crtTblTask.getParentTasks()) { - parentOfCrtTblTask.removeDependentTask(task); - } - crtTblTask.addDependentTask(task); - } else { - task.addDependentTask(crtTblTask); - } - } + patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtTblTask); + } else if (pCtx.getQueryProperties().isMaterializedView()) { + // generate a DDL task and make it a dependent task of the leaf + CreateViewDesc viewDesc = pCtx.getCreateViewDesc(); + Task crtViewTask = TaskFactory.get(new DDLWork( + inputs, outputs, viewDesc), conf); + patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtViewTask); } if (globalLimitCtx.isEnable() && pCtx.getFetchTask() != null) { @@ -372,6 +351,44 @@ public void compile(final ParseContext pCtx, final List> rootTasks, + final HashSet outputs, + Task createTask) { + // clear the mapredWork output file from outputs for CTAS + // DDLWork at the tail of the chain will have the output + Iterator outIter = outputs.iterator(); + while (outIter.hasNext()) { + switch (outIter.next().getType()) { + case DFS_DIR: + case LOCAL_DIR: + outIter.remove(); + break; + default: + break; + } + } + + // find all leaf tasks and make the DDLTask as a dependent task of all of + // them + HashSet> leaves = + new LinkedHashSet>(); + getLeafTasks(rootTasks, leaves); + assert (leaves.size() > 0); + for (Task task : leaves) { + if (task instanceof StatsTask) { + // StatsTask require table to already exist + for (Task parentOfStatsTask : task.getParentTasks()) { + parentOfStatsTask.addDependentTask(createTask); + } + for (Task parentOfCrtTblTask : createTask.getParentTasks()) { + parentOfCrtTblTask.removeDependentTask(task); + } + createTask.addDependentTask(task); + } else { + task.addDependentTask(createTask); + } + } + } /** * A helper function to generate a column stats task on top of map-red task. 
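The new patchUpAfterCTASorMaterializedView helper above factors out the leaf-task wiring CTAS already needed so CREATE MATERIALIZED VIEW can reuse it: the create task is made a dependent of every leaf of the plan, except that a StatsTask is re-parented to run after the create task, since statistics can only be gathered once the table exists. A toy model of that re-parenting, with hypothetical ToyTask objects standing in for Hive's Task DAG (illustrative only):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class ToyTask {
  final String name;
  final boolean isStatsTask;
  final List<ToyTask> parents = new ArrayList<>();
  final List<ToyTask> children = new ArrayList<>();
  ToyTask(String name, boolean isStatsTask) { this.name = name; this.isStatsTask = isStatsTask; }
  void addDependentTask(ToyTask child) { children.add(child); child.parents.add(this); }
  void removeDependentTask(ToyTask child) { children.remove(child); child.parents.remove(this); }
}

public class PatchUpSketch {
  /** Hook the create task after all leaves; slot it between a StatsTask and its parents. */
  static void patchUp(List<ToyTask> leaves, ToyTask createTask) {
    for (ToyTask leaf : leaves) {
      if (leaf.isStatsTask) {
        for (ToyTask parent : new ArrayList<>(leaf.parents)) {
          parent.addDependentTask(createTask);
          parent.removeDependentTask(leaf);
        }
        createTask.addDependentTask(leaf);
      } else {
        leaf.addDependentTask(createTask);
      }
    }
  }

  public static void main(String[] args) {
    ToyTask move = new ToyTask("move", false);
    ToyTask stats = new ToyTask("stats", true);
    move.addDependentTask(stats);
    ToyTask ddl = new ToyTask("create-mv", false);
    patchUp(Collections.singletonList(stats), ddl);
    // Prints "move -> create-mv -> stats": the stats task now waits for the DDL task.
    System.out.println("move -> " + move.children.get(0).name + " -> " + ddl.children.get(0).name);
  }
}
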
The column stats @@ -381,7 +398,11 @@ public void compile(final ParseContext pCtx, final List partCols = mTable.getPartCols(); List bucketingCols = mTable.getBucketCols(); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java index 81c4f77..6830bda 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java @@ -36,8 +36,6 @@ private static final long serialVersionUID = 1L; private String viewName; - private String inputFormat; - private String outputFormat; private String originalText; private String expandedText; private List schema; @@ -48,6 +46,13 @@ private boolean ifNotExists; private boolean orReplace; private boolean isAlterViewAs; + private boolean isMaterialized; + private String inputFormat; + private String outputFormat; + private String location; // only used for materialized views + private String serde; // only used for materialized views + private String storageHandler; // only used for materialized views + private Map serdeProps; // only used for materialized views /** * For serialization only. @@ -55,21 +60,75 @@ public CreateViewDesc() { } - public CreateViewDesc(String viewName, List schema, - String comment, String inputFormat, - String outputFormat, Map tblProps, - List partColNames, boolean ifNotExists, - boolean orReplace, boolean isAlterViewAs) { + /** + * Used to create a materialized view descriptor + * @param viewName + * @param schema + * @param comment + * @param tblProps + * @param partColNames + * @param ifNotExists + * @param orReplace + * @param isAlterViewAs + * @param inputFormat + * @param outputFormat + * @param location + * @param serde + * @param storageHandler + * @param serdeProps + */ + public CreateViewDesc(String viewName, List schema, String comment, + Map tblProps, List partColNames, + boolean ifNotExists, boolean orReplace, boolean isAlterViewAs, + String inputFormat, String outputFormat, String location, + String serde, String storageHandler, Map serdeProps) { this.viewName = viewName; this.schema = schema; + this.tblProps = tblProps; + this.partColNames = partColNames; this.comment = comment; + this.ifNotExists = ifNotExists; + this.orReplace = orReplace; + this.isAlterViewAs = isAlterViewAs; + this.isMaterialized = true; this.inputFormat = inputFormat; this.outputFormat = outputFormat; + this.location = location; + this.serde = serde; + this.storageHandler = storageHandler; + this.serdeProps = serdeProps; + } + + /** + * Used to create a view descriptor + * @param viewName + * @param schema + * @param comment + * @param tblProps + * @param partColNames + * @param ifNotExists + * @param orReplace + * @param isAlterViewAs + * @param inputFormat + * @param outputFormat + * @param serde + */ + public CreateViewDesc(String viewName, List schema, String comment, + Map tblProps, List partColNames, + boolean ifNotExists, boolean orReplace, boolean isAlterViewAs, + String inputFormat, String outputFormat, String serde) { + this.viewName = viewName; + this.schema = schema; this.tblProps = tblProps; this.partColNames = partColNames; + this.comment = comment; this.ifNotExists = ifNotExists; this.orReplace = orReplace; this.isAlterViewAs = isAlterViewAs; + this.isMaterialized = false; + this.inputFormat = inputFormat; + this.outputFormat = outputFormat; + this.serde = serde; } @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -194,4 +253,24 @@ 
public void setOutputFormat(String outputFormat) { this.outputFormat = outputFormat; } + public boolean isMaterialized() { + return isMaterialized; + } + + public String getLocation() { + return location; + } + + public String getSerde() { + return serde; + } + + public String getStorageHandler() { + return storageHandler; + } + + public Map getSerdeProps() { + return serdeProps; + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java index 62c8f7e..74c742c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java @@ -18,14 +18,15 @@ package org.apache.hadoop.hive.ql.plan; -import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + /** * DropTableDesc. * TODO: this is currently used for both drop table and drop partitions. @@ -53,10 +54,11 @@ public int getPrefixLength() { String tableName; ArrayList partSpecs; - boolean expectView; + TableType expectedType; boolean ifExists; boolean ifPurge; ReplicationSpec replicationSpec; + public DropTableDesc() { } @@ -66,18 +68,18 @@ public DropTableDesc() { * @param ifPurge */ public DropTableDesc( - String tableName, boolean expectView, boolean ifExists, + String tableName, TableType expectedType, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) { this.tableName = tableName; this.partSpecs = null; - this.expectView = expectView; + this.expectedType = expectedType; this.ifExists = ifExists; this.ifPurge = ifPurge; this.replicationSpec = replicationSpec; } public DropTableDesc(String tableName, Map> partSpecs, - boolean expectView, boolean ifPurge, ReplicationSpec replicationSpec) { + TableType expectedType, boolean ifPurge, ReplicationSpec replicationSpec) { this.tableName = tableName; this.partSpecs = new ArrayList(partSpecs.size()); for (Map.Entry> partSpec : partSpecs.entrySet()) { @@ -86,7 +88,7 @@ public DropTableDesc(String tableName, Map inClass = + (Class)JavaUtils.loadClass(crtViewDesc.getInputFormat()); + Class outClass = + (Class)JavaUtils.loadClass(crtViewDesc.getOutputFormat()); + + ret.setInputFileFormatClass(inClass); + ret.setOutputFileFormatClass(outClass); + } catch (ClassNotFoundException e) { + throw new RuntimeException("Unable to find class in getTableDesc: " + e.getMessage(), e); + } + return ret; + } + /** * Generate the table descriptor of MetadataTypedColumnsetSerDe with the * separatorCode. 
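The DropTableDesc change above replaces the old expectView boolean with a TableType (expectedType), which is what lets DROP TABLE, DROP VIEW, and DROP MATERIALIZED VIEW each refuse to drop the wrong kind of object. The DDLTask-side check is not part of this excerpt; a minimal sketch of the rule implied by the new field and by the error text in the .q.out fixtures further down (illustrative only, not the actual Hive implementation):

import org.apache.hadoop.hive.metastore.TableType;

public class DropTypeGuard {
  /** Returns an error message if the drop statement does not match the object, else null. */
  static String checkDrop(String actualType, TableType expectedType) {
    boolean actualIsMv = TableType.MATERIALIZED_VIEW.toString().equals(actualType);
    if (actualIsMv && expectedType == TableType.VIRTUAL_VIEW) {
      return "Cannot drop a materialized view with DROP VIEW";
    }
    if (!actualIsMv && expectedType == TableType.MATERIALIZED_VIEW) {
      return "Cannot drop a base table with DROP MATERIALIZED VIEW";
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(checkDrop(TableType.MANAGED_TABLE.toString(), TableType.MATERIALIZED_VIEW));
    System.out.println(checkDrop(TableType.MATERIALIZED_VIEW.toString(), TableType.VIRTUAL_VIEW));
  }
}
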
MetaDataTypedColumnsetSerDe is used because LazySimpleSerDe diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java index 82e7fc5..4931e60 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java @@ -424,6 +424,8 @@ public TableWrapper(org.apache.hadoop.hive.metastore.api.Table apiTable) { wrapperApiTable.setTableType(TableType.EXTERNAL_TABLE.toString()); } else if (MetaStoreUtils.isIndexTable(wrapperApiTable)) { wrapperApiTable.setTableType(TableType.INDEX_TABLE.toString()); + } else if (MetaStoreUtils.isMaterializedViewTable(wrapperApiTable)) { + wrapperApiTable.setTableType(TableType.MATERIALIZED_VIEW.toString()); } else if ((wrapperApiTable.getSd() == null) || (wrapperApiTable.getSd().getLocation() == null)) { wrapperApiTable.setTableType(TableType.VIRTUAL_VIEW.toString()); } else { diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index c507f67..2da4249 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -82,7 +82,9 @@ CREATEMACRO, DROPMACRO, CREATEVIEW, + CREATE_MATERIALIZED_VIEW, DROPVIEW, + DROP_MATERIALIZED_VIEW, CREATEINDEX, DROPINDEX, ALTERINDEX_REBUILD, diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index bbe28ab..e5389bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -257,6 +257,8 @@ public HivePrivilegeObjectType getObjectType() { (OWNER_PRIV_AR, OWNER_PRIV_AR)); op2Priv.put(HiveOperationType.DROPVIEW, PrivRequirement.newIOPrivRequirement (OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.DROP_MATERIALIZED_VIEW, PrivRequirement.newIOPrivRequirement +(OWNER_PRIV_AR, OWNER_PRIV_AR)); op2Priv.put(HiveOperationType.ANALYZE_TABLE, PrivRequirement.newIOPrivRequirement (arr(SQLPrivTypeGrant.SELECT_NOGRANT, SQLPrivTypeGrant.INSERT_NOGRANT), null)); @@ -332,6 +334,10 @@ public HivePrivilegeObjectType getObjectType() { new PrivRequirement(SEL_GRANT_AR, IOType.INPUT), new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE))); + op2Priv.put(HiveOperationType.CREATE_MATERIALIZED_VIEW, PrivRequirement.newPrivRequirementList( + new PrivRequirement(SEL_GRANT_AR, IOType.INPUT), + new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE))); + op2Priv.put(HiveOperationType.SHOWFUNCTIONS, PrivRequirement.newIOPrivRequirement (null, null)); op2Priv.put(HiveOperationType.SHOWINDEXES, PrivRequirement.newIOPrivRequirement diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_grant.q ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_grant.q new file mode 100644 index 0000000..079baff --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_grant.q @@ -0,0 +1,15 
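The authorization listener change above recognizes materialized views by delegating to MetaStoreUtils.isMaterializedViewTable before falling back to the location-based virtual-view heuristic. A small probe of that helper (hypothetical caller; assumes the metastore API classes are on the classpath):

import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableTypeProbe {
  public static void main(String[] args) {
    Table t = new Table();
    t.setTableType(TableType.MATERIALIZED_VIEW.toString());
    System.out.println(MetaStoreUtils.isMaterializedViewTable(t));           // expected: true
    System.out.println(MetaStoreUtils.isMaterializedViewTable(new Table())); // untyped table: false
  }
}
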
@@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvcng_gtable (a int, b varchar(256), c decimal(10,2)); + +insert into amvcng_gtable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +grant select on table amvcng_gtable to user user2; + +set user.name=user2; + +create materialized view amvcng_gmat_view as select a, c from amvcng_gtable; diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_select_perm.q ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_select_perm.q new file mode 100644 index 0000000..4de525c --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_create_no_select_perm.q @@ -0,0 +1,14 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvnsp_table (a int, b varchar(256), c decimal(10,2)); + +insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + + +set user.name=user2; + +create materialized view amvnsp_mat_view as select a, c from amvnsp_table; diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_drop_other.q ql/src/test/queries/clientnegative/materialized_view_authorization_drop_other.q new file mode 100644 index 0000000..c9a5930 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_drop_other.q @@ -0,0 +1,14 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvdo_table (a int, b varchar(256), c decimal(10,2)); + +insert into amvdo_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view amvdo_mat_view as select a, c from amvdo_table; + +set user.name=user2; +drop materialized view amvdo_mat_view; diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_no_select_perm.q ql/src/test/queries/clientnegative/materialized_view_authorization_no_select_perm.q new file mode 100644 index 0000000..8428152 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_no_select_perm.q @@ -0,0 +1,14 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvnsp_table 
(a int, b varchar(256), c decimal(10,2)); + +insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view amvnsp_mat_view as select a, c from amvnsp_table; + +set user.name=user2; +select * from amvnsp_mat_view; diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_no_grant.q ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_no_grant.q new file mode 100644 index 0000000..a2e7d38 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_no_grant.q @@ -0,0 +1,20 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvrng_table (a int, b varchar(256), c decimal(10,2)); + +insert into amvrng_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +grant select on table amvrng_table to user user2 with grant option; + +set user.name=user2; +create materialized view amvrng_mat_view as select a, c from amvrng_table; + +set user.name=user1; +revoke grant option for select on table amvrng_table from user user2; + +set user.name=user2; +alter materialized view amvrng_mat_view rebuild; diff --git ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_other.q ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_other.q new file mode 100644 index 0000000..7c2d145 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_authorization_rebuild_other.q @@ -0,0 +1,14 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvro_table (a int, b varchar(256), c decimal(10,2)); + +insert into amvro_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view amvro_mat_view as select a, c from amvro_table; + +set user.name=user2; +alter materialized view amvro_mat_view rebuild; diff --git ql/src/test/queries/clientnegative/materialized_view_delete.q ql/src/test/queries/clientnegative/materialized_view_delete.q new file mode 100644 index 0000000..f557df9 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_delete.q @@ -0,0 +1,10 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.enforce.bucketing=true; + +create table dmv_basetable (a int, b varchar(256), c decimal(10,2)); + + +create materialized view dmv_mat_view as select a, b, c from dmv_basetable; + +delete from dmv_mat_view where b = 'fred'; diff --git ql/src/test/queries/clientnegative/materialized_view_drop.q ql/src/test/queries/clientnegative/materialized_view_drop.q new file mode 100644 index 0000000..f16d8a7 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_drop.q @@ -0,0 +1,9 @@ +create table cmv_basetable (a 
int, b varchar(256), c decimal(10,2)); + +insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view cmv_mat_view as select a, b, c from cmv_basetable; + +drop materialized view cmv_mat_view; + +drop materialized view cmv_basetable; diff --git ql/src/test/queries/clientnegative/materialized_view_drop2.q ql/src/test/queries/clientnegative/materialized_view_drop2.q new file mode 100644 index 0000000..627a78b --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_drop2.q @@ -0,0 +1,7 @@ +create table cmv_basetable (a int, b varchar(256), c decimal(10,2)); + +insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view cmv_mat_view as select a, b, c from cmv_basetable; + +drop view cmv_mat_view; diff --git ql/src/test/queries/clientnegative/materialized_view_insert.q ql/src/test/queries/clientnegative/materialized_view_insert.q new file mode 100644 index 0000000..2daae52 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_insert.q @@ -0,0 +1,6 @@ +create table imv_basetable (a int, b varchar(256), c decimal(10,2)); + + +create materialized view imv_mat_view as select a, b, c from imv_basetable; + +insert into imv_mat_view values (1, 'fred', 3.14); diff --git ql/src/test/queries/clientnegative/materialized_view_load.q ql/src/test/queries/clientnegative/materialized_view_load.q new file mode 100644 index 0000000..a8d0dfd --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_load.q @@ -0,0 +1,7 @@ +create table lmv_basetable (a int, b varchar(256), c decimal(10,2)); + + +create materialized view lmv_mat_view as select a, b, c from lmv_basetable; + +LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE lmv_mat_view; + diff --git ql/src/test/queries/clientnegative/materialized_view_replace_with_view.q ql/src/test/queries/clientnegative/materialized_view_replace_with_view.q new file mode 100644 index 0000000..7c972f5 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_replace_with_view.q @@ -0,0 +1,8 @@ +create table rmvwv_basetable (a int, b varchar(256), c decimal(10,2)); + +insert into rmvwv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view rmvwv_mat_view as select a, b, c from rmvwv_basetable; + +create or replace view rmvwv_mat_view as select a, c from rmvwv_basetable; + diff --git ql/src/test/queries/clientnegative/materialized_view_update.q ql/src/test/queries/clientnegative/materialized_view_update.q new file mode 100644 index 0000000..8245ef0 --- /dev/null +++ ql/src/test/queries/clientnegative/materialized_view_update.q @@ -0,0 +1,10 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.enforce.bucketing=true; + +create table umv_basetable (a int, b varchar(256), c decimal(10,2)); + + +create materialized view umv_mat_view as select a, b, c from umv_basetable; + +update umv_mat_view set b = 'joe' where b = 'fred'; diff --git ql/src/test/queries/clientnegative/view_delete.q ql/src/test/queries/clientnegative/view_delete.q new file mode 100644 index 0000000..a19b0e3 --- /dev/null +++ ql/src/test/queries/clientnegative/view_delete.q @@ -0,0 +1,9 @@ +set hive.support.concurrency=true; +set 
hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.enforce.bucketing=true; + +create table dv_basetable (a int, b varchar(256), c decimal(10,2)); + +create view dv_view as select a, b, c from dv_basetable; + +delete from dv_view where b = 'fred'; diff --git ql/src/test/queries/clientnegative/view_update.q ql/src/test/queries/clientnegative/view_update.q new file mode 100644 index 0000000..af27ae6 --- /dev/null +++ ql/src/test/queries/clientnegative/view_update.q @@ -0,0 +1,9 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +set hive.enforce.bucketing=true; + +create table uv_basetable (a int, b varchar(256), c decimal(10,2)); + +create view uv_view as select a, b, c from uv_basetable; + +update uv_view set b = 'joe' where b = 'fred'; diff --git ql/src/test/queries/clientpositive/authorization_view_sqlstd.q ql/src/test/queries/clientpositive/authorization_view_sqlstd.q deleted file mode 100644 index 14044bf..0000000 --- ql/src/test/queries/clientpositive/authorization_view_sqlstd.q +++ /dev/null @@ -1,86 +0,0 @@ -set hive.test.authz.sstd.hs2.mode=true; -set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; -set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; -set hive.security.authorization.enabled=true; -set user.name=user1; - --- Test view authorization , and 'show grant' variants - -create table t1(i int, j int, k int); -show grant user user1 on table t1; - --- protecting certain columns -create view vt1 as select i,k from t1; - --- protecting certain rows -create view vt2 as select * from t1 where i > 1; - -show grant user user1 on all; - ---view grant to user --- try with and without table keyword - -grant select on vt1 to user user2; -grant insert on table vt1 to user user3; - -set user.name=user2; -show grant user user2 on table vt1; -set user.name=user3; -show grant user user3 on table vt1; - - -set user.name=user2; - -explain authorization select * from vt1; -select * from vt1; - --- verify input objects required does not include table --- even if view is within a sub query -select * from (select * from vt1) a; - -select * from vt1 union all select * from vt1; - -set user.name=user1; - -grant all on table vt2 to user user2; - -set user.name=user2; -show grant user user2 on table vt2; -show grant user user2 on all; -set user.name=user1; - -revoke all on vt2 from user user2; - -set user.name=user2; -show grant user user2 on table vt2; - - -set user.name=hive_admin_user; -set role admin; -show grant on table vt2; - -set user.name=user1; -revoke select on table vt1 from user user2; - -set user.name=user2; -show grant user user2 on table vt1; -show grant user user2 on all; - -set user.name=user3; --- grant privileges on roles for view, after next statement -show grant user user3 on table vt1; - -set user.name=hive_admin_user; -show current roles; -set role ADMIN; -create role role_v; -grant role_v to user user4 ; -show role grant user user4; -show roles; - -grant all on table vt2 to role role_v; -show grant role role_v on table vt2; - -revoke delete on table vt2 from role role_v; -show grant role role_v on table vt2; -show grant on table vt2; diff --git ql/src/test/queries/clientpositive/materialized_view_authorization_sqlstd.q ql/src/test/queries/clientpositive/materialized_view_authorization_sqlstd.q new file mode 100644 index 0000000..87a53e9 --- /dev/null 
+++ ql/src/test/queries/clientpositive/materialized_view_authorization_sqlstd.q @@ -0,0 +1,58 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table amvs_table (a int, b varchar(256), c decimal(10,2)); + +insert into amvs_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view amvs_mat_view as select a, c from amvs_table; + +show grant user user1 on table amvs_mat_view; + +grant select on amvs_mat_view to user user2; + +set user.name=user2; +show grant user user2 on table amvs_mat_view; +select * from amvs_mat_view; + +set user.name=user3; +show grant user user3 on table amvs_mat_view; + + +set user.name=hive_admin_user; +set role admin; +show grant on table amvs_mat_view; + +set user.name=user1; +revoke select on table amvs_mat_view from user user2; +set user.name=user2; +show grant user user2 on table amvs_mat_view; + +set user.name=hive_admin_user; +set role ADMIN; +create role role_v; +grant role_v to user user4 ; +show role grant user user4; +show roles; + +grant all on table amvs_mat_view to role role_v; +show grant role role_v on table amvs_mat_view; +show grant user user4 on table amvs_mat_view; +select * from amvs_mat_view; + +set user.name=user1; +alter materialized view amvs_mat_view rebuild; + +grant select on table amvs_table to user user2 with grant option; +set user.name=user2; +create materialized view amvs_mat_view2 as select a, b from amvs_table; + +select * from amvs_mat_view2; + +drop materialized view amvs_mat_view2; + +set user.name=hive_admin_user; +set role ADMIN; diff --git ql/src/test/queries/clientpositive/materialized_view_create.q ql/src/test/queries/clientpositive/materialized_view_create.q new file mode 100644 index 0000000..57f8bb7 --- /dev/null +++ ql/src/test/queries/clientpositive/materialized_view_create.q @@ -0,0 +1,31 @@ +create table cmv_basetable (a int, b varchar(256), c decimal(10,2)); + +insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view cmv_mat_view as select a, b, c from cmv_basetable; + +select * from cmv_mat_view; + +create materialized view if not exists cmv_mat_view2 as select a, c from cmv_basetable; + +select * from cmv_mat_view2; + +create materialized view if not exists cmv_mat_view3 as select * from cmv_basetable where a > 1; + +select * from cmv_mat_view3; + +create materialized view cmv_mat_view4 comment 'this is a comment' as select a, sum(c) from cmv_basetable group by a; + +select * from cmv_mat_view4; + +describe extended cmv_mat_view4; + +create table cmv_basetable2 (d int, e varchar(256), f decimal(10,2)); + +insert into cmv_basetable2 values (4, 'alfred', 100.30),(4, 'bob', 6133,14),(5, 'bonnie', 172.2),(6, 'calvin', 8.76),(17, 'charlie', 13144339.8); + +create materialized view cmv_mat_view5 tblproperties ('key'='value') as select a, b, d, c, f from cmv_basetable t1 join cmv_basetable2 t2 on (t1.b = t2.e); + +select * from cmv_mat_view5; + +show tblproperties cmv_mat_view5; diff --git ql/src/test/queries/clientpositive/materialized_view_describe.q 
ql/src/test/queries/clientpositive/materialized_view_describe.q new file mode 100644 index 0000000..1068b0d --- /dev/null +++ ql/src/test/queries/clientpositive/materialized_view_describe.q @@ -0,0 +1,63 @@ +create table cmv_basetable (a int, b varchar(256), c decimal(10,2)); + +insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8); + +create materialized view cmv_mat_view +comment 'this is the first view' +tblproperties ('key'='foo') as select a, c from cmv_basetable; + +describe cmv_mat_view; + +describe extended cmv_mat_view; + +describe formatted cmv_mat_view; + +show tblproperties cmv_mat_view; + +select a, c from cmv_mat_view; + +drop materialized view cmv_mat_view; + +create materialized view cmv_mat_view2 +comment 'this is the second view' +stored as textfile +tblproperties ('key'='alice','key2'='bob') as select a from cmv_basetable; + +describe formatted cmv_mat_view2; + +select a from cmv_mat_view2; + +drop materialized view cmv_mat_view2; + +create materialized view cmv_mat_view3 +comment 'this is the third view' +row format + delimited fields terminated by '\t' +as select * from cmv_basetable; + +describe formatted cmv_mat_view3; + +select a, b, c from cmv_mat_view3; + +select distinct a from cmv_mat_view3; + +drop materialized view cmv_mat_view3; + +dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/t; + +create materialized view cmv_mat_view4 +comment 'this is the last view' +stored as textfile +location '${system:test.tmp.dir}/t' +as select a from cmv_basetable; + +describe formatted cmv_mat_view4; + +alter materialized view cmv_mat_view4 rebuild; + +describe formatted cmv_mat_view4; + +select a from cmv_mat_view4; + +drop materialized view cmv_mat_view4; + diff --git ql/src/test/queries/clientpositive/materialized_view_drop.q ql/src/test/queries/clientpositive/materialized_view_drop.q new file mode 100644 index 0000000..8897159 --- /dev/null +++ ql/src/test/queries/clientpositive/materialized_view_drop.q @@ -0,0 +1,7 @@ +create materialized view dmv_mat_view as select cint, cstring1 from alltypesorc where cint < 0; + +show table extended like dmv_mat_view; + +drop materialized view dmv_mat_view; + +show table extended like dmv_mat_view; diff --git ql/src/test/queries/clientpositive/materialized_view_rebuild.q ql/src/test/queries/clientpositive/materialized_view_rebuild.q new file mode 100644 index 0000000..c0a5530 --- /dev/null +++ ql/src/test/queries/clientpositive/materialized_view_rebuild.q @@ -0,0 +1,13 @@ +create table rmv_table (cint int, cstring1 string); + +insert into rmv_table values(1, 'fred'), (10, 'wilma'); + +create materialized view rmv_mat_view as select cint, cstring1 from rmv_table where cint < 10; + +select * from rmv_mat_view; + +insert into rmv_table values(2, 'barney'), (11, 'betty'); + +alter materialized view rmv_mat_view rebuild; + +select * from rmv_mat_view; diff --git ql/src/test/queries/clientpositive/view_authorization_sqlstd.q ql/src/test/queries/clientpositive/view_authorization_sqlstd.q new file mode 100644 index 0000000..64c5520 --- /dev/null +++ ql/src/test/queries/clientpositive/view_authorization_sqlstd.q @@ -0,0 +1,89 @@ +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set 
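The rebuild test above exercises ALTER MATERIALIZED VIEW ... REBUILD, which (per the analyzer change earlier in this patch) fetches the stored view text and re-parses it with ParseDriver before recompiling the query. A standalone illustration of that re-parse step, assuming ParseDriver's single-argument parse overload and using the view definition from the test as a literal rather than fetching it from the metastore:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class RebuildParseSketch {
  public static void main(String[] args) throws Exception {
    ParseDriver pd = new ParseDriver();
    // In the analyzer this text comes from Table.getViewOriginalText().
    ASTNode tree = pd.parse("select cint, cstring1 from rmv_table where cint < 10");
    System.out.println(tree.dump());
  }
}
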
hive.security.authorization.enabled=true; +set user.name=user1; + +-- Test view authorization , and 'show grant' variants + +create table t1(i int, j int, k int); +grant select on t1 to user user2 with grant option; +show grant user user1 on table t1; + +-- protecting certain columns +create view vt1 as select i,k from t1; + +-- protecting certain rows +create view vt2 as select * from t1 where i > 1; + +show grant user user1 on all; + +--view grant to user +-- try with and without table keyword + +grant select on vt1 to user user2; +grant insert on table vt1 to user user3; + +set user.name=user2; +show grant user user2 on table vt1; +create view vt3 as select i,k from t1; + +set user.name=user3; +show grant user user3 on table vt1; + + +set user.name=user2; + +explain authorization select * from vt1; +select * from vt1; + +-- verify input objects required does not include table +-- even if view is within a sub query +select * from (select * from vt1) a; + +select * from vt1 union all select * from vt1; + +set user.name=user1; + +grant all on table vt2 to user user2; + +set user.name=user2; +show grant user user2 on table vt2; +show grant user user2 on all; +set user.name=user1; + +revoke all on vt2 from user user2; + +set user.name=user2; +show grant user user2 on table vt2; + + +set user.name=hive_admin_user; +set role admin; +show grant on table vt2; + +set user.name=user1; +revoke select on table vt1 from user user2; + +set user.name=user2; +show grant user user2 on table vt1; +show grant user user2 on all; + +set user.name=user3; +-- grant privileges on roles for view, after next statement +show grant user user3 on table vt1; + +set user.name=hive_admin_user; +show current roles; +set role ADMIN; +create role role_v; +grant role_v to user user4 ; +show role grant user user4; +show roles; + +grant all on table vt2 to role role_v; +show grant role role_v on table vt2; + +revoke delete on table vt2 from role role_v; +show grant role role_v on table vt2; +show grant on table vt2; diff --git ql/src/test/results/clientnegative/materialized_view_authorization_create_no_grant.q.out ql/src/test/results/clientnegative/materialized_view_authorization_create_no_grant.q.out new file mode 100644 index 0000000..8c6da0d --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_create_no_grant.q.out @@ -0,0 +1,26 @@ +PREHOOK: query: create table amvcng_gtable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvcng_gtable +POSTHOOK: query: create table amvcng_gtable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvcng_gtable +PREHOOK: query: insert into amvcng_gtable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvcng_gtable +POSTHOOK: query: insert into amvcng_gtable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvcng_gtable +POSTHOOK: Lineage: amvcng_gtable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvcng_gtable.b EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvcng_gtable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: grant select on table amvcng_gtable to user user2 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@amvcng_gtable +POSTHOOK: query: grant select on table amvcng_gtable to user user2 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@amvcng_gtable +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation CREATE_MATERIALIZED_VIEW [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.amvcng_gtable]] diff --git ql/src/test/results/clientnegative/materialized_view_authorization_create_no_select_perm.q.out ql/src/test/results/clientnegative/materialized_view_authorization_create_no_select_perm.q.out new file mode 100644 index 0000000..67d3474 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_create_no_select_perm.q.out @@ -0,0 +1,20 @@ +PREHOOK: query: create table amvnsp_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvnsp_table +POSTHOOK: query: create table amvnsp_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvnsp_table +PREHOOK: query: insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvnsp_table +POSTHOOK: query: insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvnsp_table +POSTHOOK: Lineage: amvnsp_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvnsp_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvnsp_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation CREATE_MATERIALIZED_VIEW [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.amvnsp_table]] diff --git ql/src/test/results/clientnegative/materialized_view_authorization_drop_other.q.out ql/src/test/results/clientnegative/materialized_view_authorization_drop_other.q.out new file mode 100644 index 0000000..9be10d6 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_drop_other.q.out @@ -0,0 +1,30 @@ +PREHOOK: query: create table amvdo_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvdo_table +POSTHOOK: query: create table amvdo_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvdo_table +PREHOOK: query: insert into amvdo_table values (1, 'alfred', 10.30),(2, 'bob', 
3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvdo_table +POSTHOOK: query: insert into amvdo_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvdo_table +POSTHOOK: Lineage: amvdo_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvdo_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvdo_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view amvdo_mat_view as select a, c from amvdo_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvdo_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvdo_mat_view +POSTHOOK: query: create materialized view amvdo_mat_view as select a, c from amvdo_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvdo_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvdo_mat_view +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation DROP_MATERIALIZED_VIEW [[OBJECT OWNERSHIP] on Object [type=TABLE_OR_VIEW, name=default.amvdo_mat_view], [OBJECT OWNERSHIP] on Object [type=TABLE_OR_VIEW, name=default.amvdo_mat_view]] diff --git ql/src/test/results/clientnegative/materialized_view_authorization_no_select_perm.q.out ql/src/test/results/clientnegative/materialized_view_authorization_no_select_perm.q.out new file mode 100644 index 0000000..57f992f --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_no_select_perm.q.out @@ -0,0 +1,30 @@ +PREHOOK: query: create table amvnsp_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvnsp_table +POSTHOOK: query: create table amvnsp_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvnsp_table +PREHOOK: query: insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvnsp_table +POSTHOOK: query: insert into amvnsp_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvnsp_table +POSTHOOK: Lineage: amvnsp_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvnsp_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvnsp_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view amvnsp_mat_view as select a, c from amvnsp_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW 
+PREHOOK: Input: default@amvnsp_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvnsp_mat_view +POSTHOOK: query: create materialized view amvnsp_mat_view as select a, c from amvnsp_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvnsp_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvnsp_mat_view +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation QUERY [[SELECT] on Object [type=TABLE_OR_VIEW, name=default.amvnsp_mat_view]] diff --git ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_no_grant.q.out ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_no_grant.q.out new file mode 100644 index 0000000..49cdcd7 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_no_grant.q.out @@ -0,0 +1,42 @@ +PREHOOK: query: create table amvrng_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvrng_table +POSTHOOK: query: create table amvrng_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvrng_table +PREHOOK: query: insert into amvrng_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvrng_table +POSTHOOK: query: insert into amvrng_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvrng_table +POSTHOOK: Lineage: amvrng_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvrng_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvrng_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: grant select on table amvrng_table to user user2 with grant option +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@amvrng_table +POSTHOOK: query: grant select on table amvrng_table to user user2 with grant option +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@amvrng_table +PREHOOK: query: create materialized view amvrng_mat_view as select a, c from amvrng_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvrng_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvrng_mat_view +POSTHOOK: query: create materialized view amvrng_mat_view as select a, c from amvrng_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvrng_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvrng_mat_view +PREHOOK: query: revoke grant option for select on table amvrng_table from user user2 +PREHOOK: type: REVOKE_PRIVILEGE +PREHOOK: Output: default@amvrng_table +POSTHOOK: query: revoke grant option for select on table amvrng_table from user user2 +POSTHOOK: type: REVOKE_PRIVILEGE +POSTHOOK: Output: default@amvrng_table +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following 
privileges for operation CREATE_MATERIALIZED_VIEW [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.amvrng_table]] diff --git ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_other.q.out ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_other.q.out new file mode 100644 index 0000000..933db64 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_authorization_rebuild_other.q.out @@ -0,0 +1,30 @@ +PREHOOK: query: create table amvro_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvro_table +POSTHOOK: query: create table amvro_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvro_table +PREHOOK: query: insert into amvro_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvro_table +POSTHOOK: query: insert into amvro_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvro_table +POSTHOOK: Lineage: amvro_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvro_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvro_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view amvro_mat_view as select a, c from amvro_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvro_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvro_mat_view +POSTHOOK: query: create materialized view amvro_mat_view as select a, c from amvro_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvro_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvro_mat_view +FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation CREATE_MATERIALIZED_VIEW [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.amvro_table]] diff --git ql/src/test/results/clientnegative/materialized_view_delete.q.out ql/src/test/results/clientnegative/materialized_view_delete.q.out new file mode 100644 index 0000000..c4a5887 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_delete.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table dmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@dmv_basetable +POSTHOOK: query: create table dmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dmv_basetable +PREHOOK: query: create materialized view dmv_mat_view as select a, b, c from dmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@dmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@dmv_mat_view +POSTHOOK: query: create materialized view dmv_mat_view as select a, b, c from 
dmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@dmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dmv_mat_view +FAILED: SemanticException [Error 10402]: You cannot update or delete records in a view diff --git ql/src/test/results/clientnegative/materialized_view_drop.q.out ql/src/test/results/clientnegative/materialized_view_drop.q.out new file mode 100644 index 0000000..f3ac66c --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_drop.q.out @@ -0,0 +1,42 @@ +PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable +PREHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@cmv_basetable +POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: drop materialized view cmv_mat_view +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: drop materialized view cmv_mat_view +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: drop materialized view cmv_basetable +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: default@cmv_basetable +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
Cannot drop a base table with DROP MATERIALIZED VIEW diff --git ql/src/test/results/clientnegative/materialized_view_drop2.q.out ql/src/test/results/clientnegative/materialized_view_drop2.q.out new file mode 100644 index 0000000..46a9e1f --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_drop2.q.out @@ -0,0 +1,34 @@ +PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable +PREHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@cmv_basetable +POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: drop view cmv_mat_view +PREHOOK: type: DROPVIEW +PREHOOK: Input: default@cmv_mat_view +PREHOOK: Output: default@cmv_mat_view +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
Cannot drop a materialized view with DROP VIEW diff --git ql/src/test/results/clientnegative/materialized_view_insert.q.out ql/src/test/results/clientnegative/materialized_view_insert.q.out new file mode 100644 index 0000000..3e1f591 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_insert.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table imv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@imv_basetable +POSTHOOK: query: create table imv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@imv_basetable +PREHOOK: query: create materialized view imv_mat_view as select a, b, c from imv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@imv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@imv_mat_view +POSTHOOK: query: create materialized view imv_mat_view as select a, b, c from imv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@imv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@imv_mat_view +FAILED: SemanticException [Error 10090]: A view cannot be used as target table for LOAD or INSERT diff --git ql/src/test/results/clientnegative/materialized_view_load.q.out ql/src/test/results/clientnegative/materialized_view_load.q.out new file mode 100644 index 0000000..b48bba4 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_load.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table lmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@lmv_basetable +POSTHOOK: query: create table lmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@lmv_basetable +PREHOOK: query: create materialized view lmv_mat_view as select a, b, c from lmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@lmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@lmv_mat_view +POSTHOOK: query: create materialized view lmv_mat_view as select a, b, c from lmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@lmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@lmv_mat_view +FAILED: SemanticException [Error 10090]: A view cannot be used as target table for LOAD or INSERT diff --git ql/src/test/results/clientnegative/materialized_view_replace_with_view.q.out ql/src/test/results/clientnegative/materialized_view_replace_with_view.q.out new file mode 100644 index 0000000..45160ad --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_replace_with_view.q.out @@ -0,0 +1,30 @@ +PREHOOK: query: create table rmvwv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@rmvwv_basetable +POSTHOOK: query: create table rmvwv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@rmvwv_basetable +PREHOOK: query: insert into rmvwv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@rmvwv_basetable 
+POSTHOOK: query: insert into rmvwv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@rmvwv_basetable +POSTHOOK: Lineage: rmvwv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: rmvwv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: rmvwv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view rmvwv_mat_view as select a, b, c from rmvwv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@rmvwv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@rmvwv_mat_view +POSTHOOK: query: create materialized view rmvwv_mat_view as select a, b, c from rmvwv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@rmvwv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@rmvwv_mat_view +FAILED: SemanticException [Error 10401]: Attempt to replace materialized view rmvwv_mat_view with view diff --git ql/src/test/results/clientnegative/materialized_view_update.q.out ql/src/test/results/clientnegative/materialized_view_update.q.out new file mode 100644 index 0000000..b9e7761 --- /dev/null +++ ql/src/test/results/clientnegative/materialized_view_update.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table umv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@umv_basetable +POSTHOOK: query: create table umv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@umv_basetable +PREHOOK: query: create materialized view umv_mat_view as select a, b, c from umv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@umv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@umv_mat_view +POSTHOOK: query: create materialized view umv_mat_view as select a, b, c from umv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@umv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@umv_mat_view +FAILED: SemanticException [Error 10402]: You cannot update or delete records in a view diff --git ql/src/test/results/clientnegative/view_delete.q.out ql/src/test/results/clientnegative/view_delete.q.out new file mode 100644 index 0000000..24e7ba5 --- /dev/null +++ ql/src/test/results/clientnegative/view_delete.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table dv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@dv_basetable +POSTHOOK: query: create table dv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dv_basetable +PREHOOK: query: create view dv_view as select a, b, c from dv_basetable +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@dv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@dv_view +POSTHOOK: query: create view dv_view as select a, b, c from dv_basetable +POSTHOOK: type: CREATEVIEW +POSTHOOK: 
Input: default@dv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dv_view +FAILED: SemanticException [Error 10402]: You cannot update or delete records in a view diff --git ql/src/test/results/clientnegative/view_update.q.out ql/src/test/results/clientnegative/view_update.q.out new file mode 100644 index 0000000..aaee442 --- /dev/null +++ ql/src/test/results/clientnegative/view_update.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: create table uv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@uv_basetable +POSTHOOK: query: create table uv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@uv_basetable +PREHOOK: query: create view uv_view as select a, b, c from uv_basetable +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@uv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@uv_view +POSTHOOK: query: create view uv_view as select a, b, c from uv_basetable +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@uv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@uv_view +FAILED: SemanticException [Error 10402]: You cannot update or delete records in a view diff --git ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out deleted file mode 100644 index d2fab2f..0000000 --- ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out +++ /dev/null @@ -1,269 +0,0 @@ -PREHOOK: query: -- Test view authorization , and 'show grant' variants - -create table t1(i int, j int, k int) -PREHOOK: type: CREATETABLE -PREHOOK: Output: database:default -PREHOOK: Output: default@t1 -POSTHOOK: query: -- Test view authorization , and 'show grant' variants - -create table t1(i int, j int, k int) -POSTHOOK: type: CREATETABLE -POSTHOOK: Output: database:default -POSTHOOK: Output: default@t1 -PREHOOK: query: show grant user user1 on table t1 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user1 on table t1 -POSTHOOK: type: SHOW_GRANT -default t1 user1 USER DELETE true -1 user1 -default t1 user1 USER INSERT true -1 user1 -default t1 user1 USER SELECT true -1 user1 -default t1 user1 USER UPDATE true -1 user1 -PREHOOK: query: -- protecting certain columns -create view vt1 as select i,k from t1 -PREHOOK: type: CREATEVIEW -PREHOOK: Input: default@t1 -PREHOOK: Output: database:default -PREHOOK: Output: default@vt1 -POSTHOOK: query: -- protecting certain columns -create view vt1 as select i,k from t1 -POSTHOOK: type: CREATEVIEW -POSTHOOK: Input: default@t1 -POSTHOOK: Output: database:default -POSTHOOK: Output: default@vt1 -PREHOOK: query: -- protecting certain rows -create view vt2 as select * from t1 where i > 1 -PREHOOK: type: CREATEVIEW -PREHOOK: Input: default@t1 -PREHOOK: Output: database:default -PREHOOK: Output: default@vt2 -POSTHOOK: query: -- protecting certain rows -create view vt2 as select * from t1 where i > 1 -POSTHOOK: type: CREATEVIEW -POSTHOOK: Input: default@t1 -POSTHOOK: Output: database:default -POSTHOOK: Output: default@vt2 -PREHOOK: query: show grant user user1 on all -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user1 on all -POSTHOOK: type: SHOW_GRANT -default t1 user1 USER DELETE true -1 user1 -default t1 user1 USER INSERT true -1 user1 -default t1 user1 USER SELECT true -1 user1 -default t1 user1 USER UPDATE true -1 user1 -default vt1 user1 
USER DELETE true -1 user1 -default vt1 user1 USER INSERT true -1 user1 -default vt1 user1 USER SELECT true -1 user1 -default vt1 user1 USER UPDATE true -1 user1 -default vt2 user1 USER DELETE true -1 user1 -default vt2 user1 USER INSERT true -1 user1 -default vt2 user1 USER SELECT true -1 user1 -default vt2 user1 USER UPDATE true -1 user1 -PREHOOK: query: --view grant to user --- try with and without table keyword - -grant select on vt1 to user user2 -PREHOOK: type: GRANT_PRIVILEGE -PREHOOK: Output: default@vt1 -POSTHOOK: query: --view grant to user --- try with and without table keyword - -grant select on vt1 to user user2 -POSTHOOK: type: GRANT_PRIVILEGE -POSTHOOK: Output: default@vt1 -PREHOOK: query: grant insert on table vt1 to user user3 -PREHOOK: type: GRANT_PRIVILEGE -PREHOOK: Output: default@vt1 -POSTHOOK: query: grant insert on table vt1 to user user3 -POSTHOOK: type: GRANT_PRIVILEGE -POSTHOOK: Output: default@vt1 -PREHOOK: query: show grant user user2 on table vt1 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on table vt1 -POSTHOOK: type: SHOW_GRANT -default vt1 user2 USER SELECT false -1 user1 -PREHOOK: query: show grant user user3 on table vt1 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user3 on table vt1 -POSTHOOK: type: SHOW_GRANT -default vt1 user3 USER INSERT false -1 user1 -PREHOOK: query: explain authorization select * from vt1 -PREHOOK: type: QUERY -POSTHOOK: query: explain authorization select * from vt1 -POSTHOOK: type: QUERY -INPUTS: - default@vt1 - default@t1 -OUTPUTS: -#### A masked pattern was here #### -CURRENT_USER: - user2 -OPERATION: - QUERY -PREHOOK: query: select * from vt1 -PREHOOK: type: QUERY -PREHOOK: Input: default@t1 -PREHOOK: Input: default@vt1 -#### A masked pattern was here #### -POSTHOOK: query: select * from vt1 -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1 -POSTHOOK: Input: default@vt1 -#### A masked pattern was here #### -PREHOOK: query: -- verify input objects required does not include table --- even if view is within a sub query -select * from (select * from vt1) a -PREHOOK: type: QUERY -PREHOOK: Input: default@t1 -PREHOOK: Input: default@vt1 -#### A masked pattern was here #### -POSTHOOK: query: -- verify input objects required does not include table --- even if view is within a sub query -select * from (select * from vt1) a -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1 -POSTHOOK: Input: default@vt1 -#### A masked pattern was here #### -PREHOOK: query: select * from vt1 union all select * from vt1 -PREHOOK: type: QUERY -PREHOOK: Input: default@t1 -PREHOOK: Input: default@vt1 -#### A masked pattern was here #### -POSTHOOK: query: select * from vt1 union all select * from vt1 -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1 -POSTHOOK: Input: default@vt1 -#### A masked pattern was here #### -PREHOOK: query: grant all on table vt2 to user user2 -PREHOOK: type: GRANT_PRIVILEGE -PREHOOK: Output: default@vt2 -POSTHOOK: query: grant all on table vt2 to user user2 -POSTHOOK: type: GRANT_PRIVILEGE -POSTHOOK: Output: default@vt2 -PREHOOK: query: show grant user user2 on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on table vt2 -POSTHOOK: type: SHOW_GRANT -default vt2 user2 USER DELETE false -1 user1 -default vt2 user2 USER INSERT false -1 user1 -default vt2 user2 USER SELECT false -1 user1 -default vt2 user2 USER UPDATE false -1 user1 -PREHOOK: query: show grant user user2 on all -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on all -POSTHOOK: type: 
SHOW_GRANT -default vt1 user2 USER SELECT false -1 user1 -default vt2 user2 USER DELETE false -1 user1 -default vt2 user2 USER INSERT false -1 user1 -default vt2 user2 USER SELECT false -1 user1 -default vt2 user2 USER UPDATE false -1 user1 -PREHOOK: query: revoke all on vt2 from user user2 -PREHOOK: type: REVOKE_PRIVILEGE -PREHOOK: Output: default@vt2 -POSTHOOK: query: revoke all on vt2 from user user2 -POSTHOOK: type: REVOKE_PRIVILEGE -POSTHOOK: Output: default@vt2 -PREHOOK: query: show grant user user2 on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on table vt2 -POSTHOOK: type: SHOW_GRANT -PREHOOK: query: set role admin -PREHOOK: type: SHOW_ROLES -POSTHOOK: query: set role admin -POSTHOOK: type: SHOW_ROLES -PREHOOK: query: show grant on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant on table vt2 -POSTHOOK: type: SHOW_GRANT -default vt2 user1 USER DELETE true -1 user1 -default vt2 user1 USER INSERT true -1 user1 -default vt2 user1 USER SELECT true -1 user1 -default vt2 user1 USER UPDATE true -1 user1 -PREHOOK: query: revoke select on table vt1 from user user2 -PREHOOK: type: REVOKE_PRIVILEGE -PREHOOK: Output: default@vt1 -POSTHOOK: query: revoke select on table vt1 from user user2 -POSTHOOK: type: REVOKE_PRIVILEGE -POSTHOOK: Output: default@vt1 -PREHOOK: query: show grant user user2 on table vt1 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on table vt1 -POSTHOOK: type: SHOW_GRANT -PREHOOK: query: show grant user user2 on all -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant user user2 on all -POSTHOOK: type: SHOW_GRANT -PREHOOK: query: -- grant privileges on roles for view, after next statement -show grant user user3 on table vt1 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: -- grant privileges on roles for view, after next statement -show grant user user3 on table vt1 -POSTHOOK: type: SHOW_GRANT -default vt1 user3 USER INSERT false -1 user1 -PREHOOK: query: show current roles -PREHOOK: type: SHOW_ROLES -POSTHOOK: query: show current roles -POSTHOOK: type: SHOW_ROLES -public -PREHOOK: query: set role ADMIN -PREHOOK: type: SHOW_ROLES -POSTHOOK: query: set role ADMIN -POSTHOOK: type: SHOW_ROLES -PREHOOK: query: create role role_v -PREHOOK: type: CREATEROLE -POSTHOOK: query: create role role_v -POSTHOOK: type: CREATEROLE -PREHOOK: query: grant role_v to user user4 -PREHOOK: type: GRANT_ROLE -POSTHOOK: query: grant role_v to user user4 -POSTHOOK: type: GRANT_ROLE -PREHOOK: query: show role grant user user4 -PREHOOK: type: SHOW_ROLE_GRANT -POSTHOOK: query: show role grant user user4 -POSTHOOK: type: SHOW_ROLE_GRANT -public false -1 -role_v false -1 hive_admin_user -PREHOOK: query: show roles -PREHOOK: type: SHOW_ROLES -POSTHOOK: query: show roles -POSTHOOK: type: SHOW_ROLES -admin -public -role_v -PREHOOK: query: grant all on table vt2 to role role_v -PREHOOK: type: GRANT_PRIVILEGE -PREHOOK: Output: default@vt2 -POSTHOOK: query: grant all on table vt2 to role role_v -POSTHOOK: type: GRANT_PRIVILEGE -POSTHOOK: Output: default@vt2 -PREHOOK: query: show grant role role_v on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant role role_v on table vt2 -POSTHOOK: type: SHOW_GRANT -default vt2 role_v ROLE DELETE false -1 hive_admin_user -default vt2 role_v ROLE INSERT false -1 hive_admin_user -default vt2 role_v ROLE SELECT false -1 hive_admin_user -default vt2 role_v ROLE UPDATE false -1 hive_admin_user -PREHOOK: query: revoke delete on table vt2 from role role_v -PREHOOK: type: REVOKE_PRIVILEGE -PREHOOK: 
Output: default@vt2 -POSTHOOK: query: revoke delete on table vt2 from role role_v -POSTHOOK: type: REVOKE_PRIVILEGE -POSTHOOK: Output: default@vt2 -PREHOOK: query: show grant role role_v on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant role role_v on table vt2 -POSTHOOK: type: SHOW_GRANT -default vt2 role_v ROLE INSERT false -1 hive_admin_user -default vt2 role_v ROLE SELECT false -1 hive_admin_user -default vt2 role_v ROLE UPDATE false -1 hive_admin_user -PREHOOK: query: show grant on table vt2 -PREHOOK: type: SHOW_GRANT -POSTHOOK: query: show grant on table vt2 -POSTHOOK: type: SHOW_GRANT -default vt2 role_v ROLE INSERT false -1 hive_admin_user -default vt2 role_v ROLE SELECT false -1 hive_admin_user -default vt2 role_v ROLE UPDATE false -1 hive_admin_user -default vt2 user1 USER DELETE true -1 user1 -default vt2 user1 USER INSERT true -1 user1 -default vt2 user1 USER SELECT true -1 user1 -default vt2 user1 USER UPDATE true -1 user1 diff --git ql/src/test/results/clientpositive/materialized_view_authorization_sqlstd.q.out ql/src/test/results/clientpositive/materialized_view_authorization_sqlstd.q.out new file mode 100644 index 0000000..e723f61 --- /dev/null +++ ql/src/test/results/clientpositive/materialized_view_authorization_sqlstd.q.out @@ -0,0 +1,195 @@ +PREHOOK: query: create table amvs_table (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@amvs_table +POSTHOOK: query: create table amvs_table (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvs_table +PREHOOK: query: insert into amvs_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@amvs_table +POSTHOOK: query: insert into amvs_table values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@amvs_table +POSTHOOK: Lineage: amvs_table.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: amvs_table.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: amvs_table.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view amvs_mat_view as select a, c from amvs_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvs_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvs_mat_view +POSTHOOK: query: create materialized view amvs_mat_view as select a, c from amvs_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvs_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvs_mat_view +PREHOOK: query: show grant user user1 on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user1 on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +default amvs_mat_view user1 USER DELETE true -1 user1 +default amvs_mat_view user1 USER INSERT true -1 user1 +default amvs_mat_view user1 USER SELECT true -1 user1 +default amvs_mat_view user1 USER UPDATE true -1 user1 +PREHOOK: query: grant select on 
amvs_mat_view to user user2 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@amvs_mat_view +POSTHOOK: query: grant select on amvs_mat_view to user user2 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@amvs_mat_view +PREHOOK: query: show grant user user2 on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +default amvs_mat_view user2 USER SELECT false -1 user1 +PREHOOK: query: select * from amvs_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@amvs_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from amvs_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@amvs_mat_view +#### A masked pattern was here #### +1 10.30 +2 3.14 +2 172342.20 +3 978.76 +3 9.80 +PREHOOK: query: show grant user user3 on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user3 on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +PREHOOK: query: set role admin +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role admin +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: show grant on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +default amvs_mat_view user1 USER DELETE true -1 user1 +default amvs_mat_view user1 USER INSERT true -1 user1 +default amvs_mat_view user1 USER SELECT true -1 user1 +default amvs_mat_view user1 USER UPDATE true -1 user1 +default amvs_mat_view user2 USER SELECT false -1 user1 +PREHOOK: query: revoke select on table amvs_mat_view from user user2 +PREHOOK: type: REVOKE_PRIVILEGE +PREHOOK: Output: default@amvs_mat_view +POSTHOOK: query: revoke select on table amvs_mat_view from user user2 +POSTHOOK: type: REVOKE_PRIVILEGE +POSTHOOK: Output: default@amvs_mat_view +PREHOOK: query: show grant user user2 on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: create role role_v +PREHOOK: type: CREATEROLE +POSTHOOK: query: create role role_v +POSTHOOK: type: CREATEROLE +PREHOOK: query: grant role_v to user user4 +PREHOOK: type: GRANT_ROLE +POSTHOOK: query: grant role_v to user user4 +POSTHOOK: type: GRANT_ROLE +PREHOOK: query: show role grant user user4 +PREHOOK: type: SHOW_ROLE_GRANT +POSTHOOK: query: show role grant user user4 +POSTHOOK: type: SHOW_ROLE_GRANT +public false -1 +role_v false -1 hive_admin_user +PREHOOK: query: show roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: show roles +POSTHOOK: type: SHOW_ROLES +admin +public +role_v +PREHOOK: query: grant all on table amvs_mat_view to role role_v +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@amvs_mat_view +POSTHOOK: query: grant all on table amvs_mat_view to role role_v +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@amvs_mat_view +PREHOOK: query: show grant role role_v on table amvs_mat_view +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant role role_v on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +default amvs_mat_view role_v ROLE DELETE false -1 hive_admin_user +default amvs_mat_view role_v ROLE INSERT false -1 hive_admin_user +default amvs_mat_view role_v ROLE SELECT false -1 hive_admin_user +default amvs_mat_view role_v ROLE UPDATE false -1 hive_admin_user +PREHOOK: query: show grant user user4 on table amvs_mat_view +PREHOOK: type: 
SHOW_GRANT +POSTHOOK: query: show grant user user4 on table amvs_mat_view +POSTHOOK: type: SHOW_GRANT +PREHOOK: query: select * from amvs_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@amvs_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from amvs_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@amvs_mat_view +#### A masked pattern was here #### +1 10.30 +2 3.14 +2 172342.20 +3 978.76 +3 9.80 +PREHOOK: query: alter materialized view amvs_mat_view rebuild +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvs_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvs_mat_view +POSTHOOK: query: alter materialized view amvs_mat_view rebuild +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvs_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvs_mat_view +PREHOOK: query: grant select on table amvs_table to user user2 with grant option +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@amvs_table +POSTHOOK: query: grant select on table amvs_table to user user2 with grant option +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@amvs_table +PREHOOK: query: create materialized view amvs_mat_view2 as select a, b from amvs_table +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@amvs_table +PREHOOK: Output: database:default +PREHOOK: Output: default@amvs_mat_view2 +POSTHOOK: query: create materialized view amvs_mat_view2 as select a, b from amvs_table +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvs_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@amvs_mat_view2 +PREHOOK: query: select * from amvs_mat_view2 +PREHOOK: type: QUERY +PREHOOK: Input: default@amvs_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select * from amvs_mat_view2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@amvs_mat_view2 +#### A masked pattern was here #### +1 alfred +2 bob +2 bonnie +3 calvin +3 charlie +PREHOOK: query: drop materialized view amvs_mat_view2 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@amvs_mat_view2 +PREHOOK: Output: default@amvs_mat_view2 +POSTHOOK: query: drop materialized view amvs_mat_view2 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@amvs_mat_view2 +POSTHOOK: Output: default@amvs_mat_view2 +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES diff --git ql/src/test/results/clientpositive/materialized_view_create.q.out ql/src/test/results/clientpositive/materialized_view_create.q.out new file mode 100644 index 0000000..c2c6a15 --- /dev/null +++ ql/src/test/results/clientpositive/materialized_view_create.q.out @@ -0,0 +1,170 @@ +PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable +PREHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 
'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@cmv_basetable +POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: create materialized view cmv_mat_view as select a, b, c from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: select * from cmv_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +1 alfred 10.30 +2 bob 3.14 +2 bonnie 172342.20 +3 calvin 978.76 +3 charlie 9.80 +PREHOOK: query: create materialized view if not exists cmv_mat_view2 as select a, c from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view2 +POSTHOOK: query: create materialized view if not exists cmv_mat_view2 as select a, c from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view2 +PREHOOK: query: select * from cmv_mat_view2 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +1 10.30 +2 3.14 +2 172342.20 +3 978.76 +3 9.80 +PREHOOK: query: create materialized view if not exists cmv_mat_view3 as select * from cmv_basetable where a > 1 +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view3 +POSTHOOK: query: create materialized view if not exists cmv_mat_view3 as select * from cmv_basetable where a > 1 +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view3 +PREHOOK: query: select * from cmv_mat_view3 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +2 bob 3.14 +2 bonnie 172342.20 +3 calvin 978.76 +3 charlie 9.80 +PREHOOK: query: create materialized view cmv_mat_view4 comment 'this is a comment' as select a, sum(c) from cmv_basetable group by a +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view4 +POSTHOOK: query: create materialized view cmv_mat_view4 
comment 'this is a comment' as select a, sum(c) from cmv_basetable group by a +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view4 +PREHOOK: query: select * from cmv_mat_view4 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view4 +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view4 +#### A masked pattern was here #### +1 10.30 +2 172345.34 +3 988.56 +PREHOOK: query: describe extended cmv_mat_view4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view4 +POSTHOOK: query: describe extended cmv_mat_view4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view4 +a int +c1 decimal(20,2) + +#### A masked pattern was here #### +PREHOOK: query: create table cmv_basetable2 (d int, e varchar(256), f decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable2 +POSTHOOK: query: create table cmv_basetable2 (d int, e varchar(256), f decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable2 +PREHOOK: query: insert into cmv_basetable2 values (4, 'alfred', 100.30),(4, 'bob', 6133,14),(5, 'bonnie', 172.2),(6, 'calvin', 8.76),(17, 'charlie', 13144339.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__2 +PREHOOK: Output: default@cmv_basetable2 +POSTHOOK: query: insert into cmv_basetable2 values (4, 'alfred', 100.30),(4, 'bob', 6133,14),(5, 'bonnie', 172.2),(6, 'calvin', 8.76),(17, 'charlie', 13144339.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__2 +POSTHOOK: Output: default@cmv_basetable2 +POSTHOOK: Lineage: cmv_basetable2.d EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable2.e EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable2.f EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view5 tblproperties ('key'='value') as select a, b, d, c, f from cmv_basetable t1 join cmv_basetable2 t2 on (t1.b = t2.e) +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Input: default@cmv_basetable2 +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view5 +POSTHOOK: query: create materialized view cmv_mat_view5 tblproperties ('key'='value') as select a, b, d, c, f from cmv_basetable t1 join cmv_basetable2 t2 on (t1.b = t2.e) +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Input: default@cmv_basetable2 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view5 +PREHOOK: query: select * from cmv_mat_view5 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view5 +#### A masked pattern was here #### +POSTHOOK: query: select * from cmv_mat_view5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view5 +#### A masked pattern was here #### +1 alfred 4 10.30 100.30 +2 bob 4 3.14 6133.00 +2 bonnie 5 172342.20 172.20 +3 calvin 6 978.76 8.76 +3 charlie 17 9.80 13144339.80 +PREHOOK: query: show tblproperties cmv_mat_view5 +PREHOOK: type: SHOW_TBLPROPERTIES +POSTHOOK: query: show tblproperties cmv_mat_view5 +POSTHOOK: 
type: SHOW_TBLPROPERTIES +key value +numFiles 1 +totalSize 710 +#### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/materialized_view_describe.q.out ql/src/test/results/clientpositive/materialized_view_describe.q.out new file mode 100644 index 0000000..b57a55f --- /dev/null +++ ql/src/test/results/clientpositive/materialized_view_describe.q.out @@ -0,0 +1,375 @@ +PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_basetable +PREHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@cmv_basetable +POSTHOOK: query: insert into cmv_basetable values (1, 'alfred', 10.30),(2, 'bob', 3.14),(2, 'bonnie', 172342.2),(3, 'calvin', 978.76),(3, 'charlie', 9.8) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@cmv_basetable +POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +PREHOOK: query: create materialized view cmv_mat_view +comment 'this is the first view' +tblproperties ('key'='foo') as select a, c from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: create materialized view cmv_mat_view +comment 'this is the first view' +tblproperties ('key'='foo') as select a, c from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: describe cmv_mat_view +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view +POSTHOOK: query: describe cmv_mat_view +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view +a int +c decimal(10,2) +PREHOOK: query: describe extended cmv_mat_view +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view +POSTHOOK: query: describe extended cmv_mat_view +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view +a int +c decimal(10,2) + +#### A masked pattern was here #### +PREHOOK: query: describe formatted cmv_mat_view +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view +POSTHOOK: query: describe formatted cmv_mat_view +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view +# col_name data_type comment + +a int +c decimal(10,2) + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MATERIALIZED_VIEW +Table Parameters: + comment this is the first view + key foo + numFiles 1 + totalSize 346 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: 
org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +PREHOOK: query: show tblproperties cmv_mat_view +PREHOOK: type: SHOW_TBLPROPERTIES +POSTHOOK: query: show tblproperties cmv_mat_view +POSTHOOK: type: SHOW_TBLPROPERTIES +comment this is the first view +key foo +numFiles 1 +totalSize 346 +#### A masked pattern was here #### +PREHOOK: query: select a, c from cmv_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select a, c from cmv_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view +#### A masked pattern was here #### +1 10.30 +2 3.14 +2 172342.20 +3 978.76 +3 9.80 +PREHOOK: query: drop materialized view cmv_mat_view +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view +PREHOOK: Output: default@cmv_mat_view +POSTHOOK: query: drop materialized view cmv_mat_view +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view +POSTHOOK: Output: default@cmv_mat_view +PREHOOK: query: create materialized view cmv_mat_view2 +comment 'this is the second view' +stored as textfile +tblproperties ('key'='alice','key2'='bob') as select a from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view2 +POSTHOOK: query: create materialized view cmv_mat_view2 +comment 'this is the second view' +stored as textfile +tblproperties ('key'='alice','key2'='bob') as select a from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view2 +PREHOOK: query: describe formatted cmv_mat_view2 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view2 +POSTHOOK: query: describe formatted cmv_mat_view2 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view2 +# col_name data_type comment + +a int + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MATERIALIZED_VIEW +Table Parameters: + comment this is the second view + key alice + key2 bob + numFiles 1 + totalSize 10 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +PREHOOK: query: select a from cmv_mat_view2 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +POSTHOOK: query: select a from cmv_mat_view2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view2 +#### A masked pattern was here #### +1 +2 +2 +3 +3 +PREHOOK: query: drop materialized view cmv_mat_view2 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view2 +PREHOOK: Output: default@cmv_mat_view2 +POSTHOOK: query: drop materialized view cmv_mat_view2 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view2 +POSTHOOK: Output: default@cmv_mat_view2 +PREHOOK: query: create materialized view cmv_mat_view3 +comment 'this is the third view' +row format + delimited fields terminated by '\t' +as 
select * from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view3 +POSTHOOK: query: create materialized view cmv_mat_view3 +comment 'this is the third view' +row format + delimited fields terminated by '\t' +as select * from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view3 +PREHOOK: query: describe formatted cmv_mat_view3 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view3 +POSTHOOK: query: describe formatted cmv_mat_view3 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view3 +# col_name data_type comment + +a int +b varchar(256) +c decimal(10,2) + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MATERIALIZED_VIEW +Table Parameters: + comment this is the third view + numFiles 1 + totalSize 497 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +PREHOOK: query: select a, b, c from cmv_mat_view3 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +POSTHOOK: query: select a, b, c from cmv_mat_view3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +1 alfred 10.30 +2 bob 3.14 +2 bonnie 172342.20 +3 calvin 978.76 +3 charlie 9.80 +PREHOOK: query: select distinct a from cmv_mat_view3 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +POSTHOOK: query: select distinct a from cmv_mat_view3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view3 +#### A masked pattern was here #### +1 +2 +3 +PREHOOK: query: drop materialized view cmv_mat_view3 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view3 +PREHOOK: Output: default@cmv_mat_view3 +POSTHOOK: query: drop materialized view cmv_mat_view3 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view3 +POSTHOOK: Output: default@cmv_mat_view3 +PREHOOK: query: create materialized view cmv_mat_view4 +comment 'this is the last view' +stored as textfile +#### A masked pattern was here #### +as select a from cmv_basetable +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +#### A masked pattern was here #### +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view4 +POSTHOOK: query: create materialized view cmv_mat_view4 +comment 'this is the last view' +stored as textfile +#### A masked pattern was here #### +as select a from cmv_basetable +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +#### A masked pattern was here #### +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view4 +PREHOOK: query: describe formatted cmv_mat_view4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view4 +POSTHOOK: query: describe formatted cmv_mat_view4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view4 +# col_name data_type comment + +a int + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 
+#### A masked pattern was here #### +Table Type: MATERIALIZED_VIEW +Table Parameters: + comment this is the last view + numFiles 1 + totalSize 10 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +PREHOOK: query: alter materialized view cmv_mat_view4 rebuild +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_basetable +PREHOOK: Output: database:default +PREHOOK: Output: default@cmv_mat_view4 +POSTHOOK: query: alter materialized view cmv_mat_view4 rebuild +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_basetable +POSTHOOK: Output: database:default +POSTHOOK: Output: default@cmv_mat_view4 +PREHOOK: query: describe formatted cmv_mat_view4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@cmv_mat_view4 +POSTHOOK: query: describe formatted cmv_mat_view4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@cmv_mat_view4 +# col_name data_type comment + +a int + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MATERIALIZED_VIEW +Table Parameters: + comment this is the last view + numFiles 1 + totalSize 10 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +PREHOOK: query: select a from cmv_mat_view4 +PREHOOK: type: QUERY +PREHOOK: Input: default@cmv_mat_view4 +#### A masked pattern was here #### +POSTHOOK: query: select a from cmv_mat_view4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@cmv_mat_view4 +#### A masked pattern was here #### +1 +2 +2 +3 +3 +PREHOOK: query: drop materialized view cmv_mat_view4 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@cmv_mat_view4 +PREHOOK: Output: default@cmv_mat_view4 +POSTHOOK: query: drop materialized view cmv_mat_view4 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@cmv_mat_view4 +POSTHOOK: Output: default@cmv_mat_view4 diff --git ql/src/test/results/clientpositive/materialized_view_drop.q.out ql/src/test/results/clientpositive/materialized_view_drop.q.out new file mode 100644 index 0000000..3cf13d0 --- /dev/null +++ ql/src/test/results/clientpositive/materialized_view_drop.q.out @@ -0,0 +1,39 @@ +PREHOOK: query: create materialized view dmv_mat_view as select cint, cstring1 from alltypesorc where cint < 0 +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: database:default +PREHOOK: Output: default@dmv_mat_view +POSTHOOK: query: create materialized view dmv_mat_view as select cint, cstring1 from alltypesorc where cint < 0 +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: database:default +POSTHOOK: Output: default@dmv_mat_view +PREHOOK: query: show table extended like dmv_mat_view +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like dmv_mat_view +POSTHOOK: type: SHOW_TABLESTATUS +tableName:dmv_mat_view +#### A masked pattern was here #### +inputformat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat 
+outputformat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +columns:struct columns { i32 cint, string cstring1} +partitioned:false +partitionColumns: +totalNumberFiles:1 +totalFileSize:47140 +maxFileSize:47140 +minFileSize:47140 +#### A masked pattern was here #### + +PREHOOK: query: drop materialized view dmv_mat_view +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: default@dmv_mat_view +PREHOOK: Output: default@dmv_mat_view +POSTHOOK: query: drop materialized view dmv_mat_view +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: default@dmv_mat_view +POSTHOOK: Output: default@dmv_mat_view +PREHOOK: query: show table extended like dmv_mat_view +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like dmv_mat_view +POSTHOOK: type: SHOW_TABLESTATUS diff --git ql/src/test/results/clientpositive/materialized_view_rebuild.q.out ql/src/test/results/clientpositive/materialized_view_rebuild.q.out new file mode 100644 index 0000000..cff0a61 --- /dev/null +++ ql/src/test/results/clientpositive/materialized_view_rebuild.q.out @@ -0,0 +1,67 @@ +PREHOOK: query: create table rmv_table (cint int, cstring1 string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@rmv_table +POSTHOOK: query: create table rmv_table (cint int, cstring1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@rmv_table +PREHOOK: query: insert into rmv_table values(1, 'fred'), (10, 'wilma') +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@rmv_table +POSTHOOK: query: insert into rmv_table values(1, 'fred'), (10, 'wilma') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@rmv_table +POSTHOOK: Lineage: rmv_table.cint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: rmv_table.cstring1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +PREHOOK: query: create materialized view rmv_mat_view as select cint, cstring1 from rmv_table where cint < 10 +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@rmv_table +PREHOOK: Output: database:default +PREHOOK: Output: default@rmv_mat_view +POSTHOOK: query: create materialized view rmv_mat_view as select cint, cstring1 from rmv_table where cint < 10 +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@rmv_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@rmv_mat_view +PREHOOK: query: select * from rmv_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@rmv_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from rmv_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@rmv_mat_view +#### A masked pattern was here #### +1 fred +PREHOOK: query: insert into rmv_table values(2, 'barney'), (11, 'betty') +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__2 +PREHOOK: Output: default@rmv_table +POSTHOOK: query: insert into rmv_table values(2, 'barney'), (11, 'betty') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__2 +POSTHOOK: Output: default@rmv_table +POSTHOOK: Lineage: rmv_table.cint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: rmv_table.cstring1 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, 
type:string, comment:), ] +PREHOOK: query: alter materialized view rmv_mat_view rebuild +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: default@rmv_table +PREHOOK: Output: database:default +PREHOOK: Output: default@rmv_mat_view +POSTHOOK: query: alter materialized view rmv_mat_view rebuild +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: default@rmv_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@rmv_mat_view +PREHOOK: query: select * from rmv_mat_view +PREHOOK: type: QUERY +PREHOOK: Input: default@rmv_mat_view +#### A masked pattern was here #### +POSTHOOK: query: select * from rmv_mat_view +POSTHOOK: type: QUERY +POSTHOOK: Input: default@rmv_mat_view +#### A masked pattern was here #### +1 fred +2 barney diff --git ql/src/test/results/clientpositive/view_authorization_sqlstd.q.out ql/src/test/results/clientpositive/view_authorization_sqlstd.q.out new file mode 100644 index 0000000..122ca4e --- /dev/null +++ ql/src/test/results/clientpositive/view_authorization_sqlstd.q.out @@ -0,0 +1,295 @@ +PREHOOK: query: -- Test view authorization , and 'show grant' variants + +create table t1(i int, j int, k int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t1 +POSTHOOK: query: -- Test view authorization , and 'show grant' variants + +create table t1(i int, j int, k int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t1 +PREHOOK: query: grant select on t1 to user user2 with grant option +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@t1 +POSTHOOK: query: grant select on t1 to user user2 with grant option +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@t1 +PREHOOK: query: show grant user user1 on table t1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user1 on table t1 +POSTHOOK: type: SHOW_GRANT +default t1 user1 USER DELETE true -1 user1 +default t1 user1 USER INSERT true -1 user1 +default t1 user1 USER SELECT true -1 user1 +default t1 user1 USER UPDATE true -1 user1 +PREHOOK: query: -- protecting certain columns +create view vt1 as select i,k from t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@t1 +PREHOOK: Output: database:default +PREHOOK: Output: default@vt1 +POSTHOOK: query: -- protecting certain columns +create view vt1 as select i,k from t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@t1 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vt1 +PREHOOK: query: -- protecting certain rows +create view vt2 as select * from t1 where i > 1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@t1 +PREHOOK: Output: database:default +PREHOOK: Output: default@vt2 +POSTHOOK: query: -- protecting certain rows +create view vt2 as select * from t1 where i > 1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@t1 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vt2 +PREHOOK: query: show grant user user1 on all +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user1 on all +POSTHOOK: type: SHOW_GRANT +default t1 user1 USER DELETE true -1 user1 +default t1 user1 USER INSERT true -1 user1 +default t1 user1 USER SELECT true -1 user1 +default t1 user1 USER UPDATE true -1 user1 +default vt1 user1 USER DELETE true -1 user1 +default vt1 user1 USER INSERT true -1 user1 +default vt1 user1 USER SELECT true -1 user1 +default vt1 user1 USER UPDATE true -1 user1 +default vt2 user1 USER DELETE true -1 user1 +default vt2 user1 USER INSERT true -1 user1 +default vt2 user1 USER SELECT true -1 user1 +default 
vt2 user1 USER UPDATE true -1 user1 +PREHOOK: query: --view grant to user +-- try with and without table keyword + +grant select on vt1 to user user2 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@vt1 +POSTHOOK: query: --view grant to user +-- try with and without table keyword + +grant select on vt1 to user user2 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@vt1 +PREHOOK: query: grant insert on table vt1 to user user3 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@vt1 +POSTHOOK: query: grant insert on table vt1 to user user3 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@vt1 +PREHOOK: query: show grant user user2 on table vt1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table vt1 +POSTHOOK: type: SHOW_GRANT +default vt1 user2 USER SELECT false -1 user1 +PREHOOK: query: create view vt3 as select i,k from t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@t1 +PREHOOK: Output: database:default +PREHOOK: Output: default@vt3 +POSTHOOK: query: create view vt3 as select i,k from t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@t1 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vt3 +PREHOOK: query: show grant user user3 on table vt1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user3 on table vt1 +POSTHOOK: type: SHOW_GRANT +default vt1 user3 USER INSERT false -1 user1 +PREHOOK: query: explain authorization select * from vt1 +PREHOOK: type: QUERY +POSTHOOK: query: explain authorization select * from vt1 +POSTHOOK: type: QUERY +INPUTS: + default@vt1 + default@t1 +OUTPUTS: +#### A masked pattern was here #### +CURRENT_USER: + user2 +OPERATION: + QUERY +PREHOOK: query: select * from vt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +PREHOOK: Input: default@vt1 +#### A masked pattern was here #### +POSTHOOK: query: select * from vt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +POSTHOOK: Input: default@vt1 +#### A masked pattern was here #### +PREHOOK: query: -- verify input objects required does not include table +-- even if view is within a sub query +select * from (select * from vt1) a +PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +PREHOOK: Input: default@vt1 +#### A masked pattern was here #### +POSTHOOK: query: -- verify input objects required does not include table +-- even if view is within a sub query +select * from (select * from vt1) a +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +POSTHOOK: Input: default@vt1 +#### A masked pattern was here #### +PREHOOK: query: select * from vt1 union all select * from vt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +PREHOOK: Input: default@vt1 +#### A masked pattern was here #### +POSTHOOK: query: select * from vt1 union all select * from vt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +POSTHOOK: Input: default@vt1 +#### A masked pattern was here #### +PREHOOK: query: grant all on table vt2 to user user2 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@vt2 +POSTHOOK: query: grant all on table vt2 to user user2 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@vt2 +PREHOOK: query: show grant user user2 on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table vt2 +POSTHOOK: type: SHOW_GRANT +default vt2 user2 USER DELETE false -1 user1 +default vt2 user2 USER INSERT false -1 user1 +default vt2 user2 USER SELECT false -1 user1 +default vt2 user2 USER UPDATE false -1 user1 +PREHOOK: query: show grant user user2 on all +PREHOOK: type: SHOW_GRANT +POSTHOOK: 
query: show grant user user2 on all +POSTHOOK: type: SHOW_GRANT +default t1 user2 USER SELECT true -1 user1 +default vt1 user2 USER SELECT false -1 user1 +default vt2 user2 USER DELETE false -1 user1 +default vt2 user2 USER INSERT false -1 user1 +default vt2 user2 USER SELECT false -1 user1 +default vt2 user2 USER UPDATE false -1 user1 +default vt3 user2 USER DELETE true -1 user1 +default vt3 user2 USER INSERT true -1 user1 +default vt3 user2 USER SELECT true -1 user1 +default vt3 user2 USER UPDATE true -1 user1 +PREHOOK: query: revoke all on vt2 from user user2 +PREHOOK: type: REVOKE_PRIVILEGE +PREHOOK: Output: default@vt2 +POSTHOOK: query: revoke all on vt2 from user user2 +POSTHOOK: type: REVOKE_PRIVILEGE +POSTHOOK: Output: default@vt2 +PREHOOK: query: show grant user user2 on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table vt2 +POSTHOOK: type: SHOW_GRANT +PREHOOK: query: set role admin +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role admin +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: show grant on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant on table vt2 +POSTHOOK: type: SHOW_GRANT +default vt2 user1 USER DELETE true -1 user1 +default vt2 user1 USER INSERT true -1 user1 +default vt2 user1 USER SELECT true -1 user1 +default vt2 user1 USER UPDATE true -1 user1 +PREHOOK: query: revoke select on table vt1 from user user2 +PREHOOK: type: REVOKE_PRIVILEGE +PREHOOK: Output: default@vt1 +POSTHOOK: query: revoke select on table vt1 from user user2 +POSTHOOK: type: REVOKE_PRIVILEGE +POSTHOOK: Output: default@vt1 +PREHOOK: query: show grant user user2 on table vt1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on table vt1 +POSTHOOK: type: SHOW_GRANT +PREHOOK: query: show grant user user2 on all +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user2 on all +POSTHOOK: type: SHOW_GRANT +default t1 user2 USER SELECT true -1 user1 +default vt3 user2 USER DELETE true -1 user1 +default vt3 user2 USER INSERT true -1 user1 +default vt3 user2 USER SELECT true -1 user1 +default vt3 user2 USER UPDATE true -1 user1 +PREHOOK: query: -- grant privileges on roles for view, after next statement +show grant user user3 on table vt1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: -- grant privileges on roles for view, after next statement +show grant user user3 on table vt1 +POSTHOOK: type: SHOW_GRANT +default vt1 user3 USER INSERT false -1 user1 +PREHOOK: query: show current roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: show current roles +POSTHOOK: type: SHOW_ROLES +public +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: create role role_v +PREHOOK: type: CREATEROLE +POSTHOOK: query: create role role_v +POSTHOOK: type: CREATEROLE +PREHOOK: query: grant role_v to user user4 +PREHOOK: type: GRANT_ROLE +POSTHOOK: query: grant role_v to user user4 +POSTHOOK: type: GRANT_ROLE +PREHOOK: query: show role grant user user4 +PREHOOK: type: SHOW_ROLE_GRANT +POSTHOOK: query: show role grant user user4 +POSTHOOK: type: SHOW_ROLE_GRANT +public false -1 +role_v false -1 hive_admin_user +PREHOOK: query: show roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: show roles +POSTHOOK: type: SHOW_ROLES +admin +public +role_v +PREHOOK: query: grant all on table vt2 to role role_v +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@vt2 +POSTHOOK: query: grant all on table vt2 to role role_v +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: 
default@vt2 +PREHOOK: query: show grant role role_v on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant role role_v on table vt2 +POSTHOOK: type: SHOW_GRANT +default vt2 role_v ROLE DELETE false -1 hive_admin_user +default vt2 role_v ROLE INSERT false -1 hive_admin_user +default vt2 role_v ROLE SELECT false -1 hive_admin_user +default vt2 role_v ROLE UPDATE false -1 hive_admin_user +PREHOOK: query: revoke delete on table vt2 from role role_v +PREHOOK: type: REVOKE_PRIVILEGE +PREHOOK: Output: default@vt2 +POSTHOOK: query: revoke delete on table vt2 from role role_v +POSTHOOK: type: REVOKE_PRIVILEGE +POSTHOOK: Output: default@vt2 +PREHOOK: query: show grant role role_v on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant role role_v on table vt2 +POSTHOOK: type: SHOW_GRANT +default vt2 role_v ROLE INSERT false -1 hive_admin_user +default vt2 role_v ROLE SELECT false -1 hive_admin_user +default vt2 role_v ROLE UPDATE false -1 hive_admin_user +PREHOOK: query: show grant on table vt2 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant on table vt2 +POSTHOOK: type: SHOW_GRANT +default vt2 role_v ROLE INSERT false -1 hive_admin_user +default vt2 role_v ROLE SELECT false -1 hive_admin_user +default vt2 role_v ROLE UPDATE false -1 hive_admin_user +default vt2 user1 USER DELETE true -1 user1 +default vt2 user1 USER INSERT true -1 user1 +default vt2 user1 USER SELECT true -1 user1 +default vt2 user1 USER UPDATE true -1 user1 diff --git service/src/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java service/src/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java index 5f8cc1e..86417a4 100644 --- service/src/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java +++ service/src/java/org/apache/hive/service/cli/operation/ClassicTableTypeMapping.java @@ -47,6 +47,7 @@ public enum ClassicTableTypes { TABLE, VIEW, + MATERIALIZED_VIEW, } private final Map hiveToClientMap = new HashMap(); @@ -56,10 +57,14 @@ public ClassicTableTypeMapping () { hiveToClientMap.put(TableType.MANAGED_TABLE.name(), ClassicTableTypes.TABLE.name()); hiveToClientMap.put(TableType.EXTERNAL_TABLE.name(), ClassicTableTypes.TABLE.name()); hiveToClientMap.put(TableType.VIRTUAL_VIEW.name(), ClassicTableTypes.VIEW.name()); + hiveToClientMap.put(TableType.MATERIALIZED_VIEW.toString(), + ClassicTableTypes.MATERIALIZED_VIEW.toString()); clientToHiveMap.putAll(ClassicTableTypes.TABLE.name(), Arrays.asList( TableType.MANAGED_TABLE.name(), TableType.EXTERNAL_TABLE.name())); clientToHiveMap.put(ClassicTableTypes.VIEW.name(), TableType.VIRTUAL_VIEW.name()); + clientToHiveMap.put(ClassicTableTypes.MATERIALIZED_VIEW.toString(), + TableType.MATERIALIZED_VIEW.toString()); } @Override
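The ClassicTableTypeMapping hunk above keeps the client-facing mapping bidirectional after MATERIALIZED_VIEW is introduced: each metastore table type maps to exactly one "classic" client label, while a client label (TABLE) may map back to several metastore types. The following is a minimal, self-contained sketch of that idea, not the actual HiveServer2 class; the class and enum names are illustrative stand-ins, and a plain Map of lists is used in place of the Guava Multimap the real class relies on.

import java.util.*;

// Sketch of a bidirectional table-type mapping once MATERIALIZED_VIEW exists.
public class TableTypeMappingSketch {

  // Stand-ins for org.apache.hadoop.hive.metastore.TableType values.
  enum MetastoreType { MANAGED_TABLE, EXTERNAL_TABLE, VIRTUAL_VIEW, INDEX_TABLE, MATERIALIZED_VIEW }

  // Stand-ins for the client-visible "classic" labels.
  enum ClientType { TABLE, VIEW, MATERIALIZED_VIEW }

  private final Map<MetastoreType, ClientType> hiveToClient = new EnumMap<>(MetastoreType.class);
  private final Map<ClientType, List<MetastoreType>> clientToHive = new EnumMap<>(ClientType.class);

  TableTypeMappingSketch() {
    // Metastore type -> client label (many-to-one).
    hiveToClient.put(MetastoreType.MANAGED_TABLE, ClientType.TABLE);
    hiveToClient.put(MetastoreType.EXTERNAL_TABLE, ClientType.TABLE);
    hiveToClient.put(MetastoreType.VIRTUAL_VIEW, ClientType.VIEW);
    hiveToClient.put(MetastoreType.MATERIALIZED_VIEW, ClientType.MATERIALIZED_VIEW);

    // Client label -> metastore types (one-to-many).
    clientToHive.put(ClientType.TABLE,
        Arrays.asList(MetastoreType.MANAGED_TABLE, MetastoreType.EXTERNAL_TABLE));
    clientToHive.put(ClientType.VIEW,
        Collections.singletonList(MetastoreType.VIRTUAL_VIEW));
    clientToHive.put(ClientType.MATERIALIZED_VIEW,
        Collections.singletonList(MetastoreType.MATERIALIZED_VIEW));
  }

  ClientType toClientType(MetastoreType t) {
    // Unmapped types (e.g. INDEX_TABLE) simply come back as null here.
    return hiveToClient.get(t);
  }

  List<MetastoreType> toMetastoreTypes(ClientType t) {
    return clientToHive.getOrDefault(t, Collections.emptyList());
  }

  public static void main(String[] args) {
    TableTypeMappingSketch m = new TableTypeMappingSketch();
    System.out.println(m.toClientType(MetastoreType.MATERIALIZED_VIEW)); // MATERIALIZED_VIEW
    System.out.println(m.toMetastoreTypes(ClientType.TABLE));            // [MANAGED_TABLE, EXTERNAL_TABLE]
  }
}

One side note on the hunk itself: the new entries use toString() where the surrounding entries use name(); for an enum without an overridden toString() the two return the same string, so the behavior is unchanged either way.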
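Earlier in this patch, the rmv_mat_view golden file exercises the rebuild-then-read flow (ALTER MATERIALIZED VIEW ... REBUILD followed by a SELECT). Below is a hedged client-side sketch of the same sequence driven through plain JDBC; the connection URL, credentials, and column types are assumptions for illustration, not values taken from the patch.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch: rebuild a materialized view, then read back its refreshed contents.
public class RebuildMaterializedView {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection(
             "jdbc:hive2://localhost:10000/default", "hive", "");
         Statement stmt = conn.createStatement()) {
      // Re-populate the materialized view from its source table.
      stmt.execute("ALTER MATERIALIZED VIEW rmv_mat_view REBUILD");
      // Read back the rebuilt view; the int/string column types are assumed.
      try (ResultSet rs = stmt.executeQuery("SELECT * FROM rmv_mat_view")) {
        while (rs.next()) {
          System.out.println(rs.getInt(1) + "\t" + rs.getString(2));
        }
      }
    }
  }
}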