diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 0a34633fa4..e117360684 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -21,15 +21,6 @@
 import static org.apache.commons.lang.StringUtils.join;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 
-import java.util.concurrent.ExecutionException;
-
-import com.google.common.util.concurrent.FutureCallback;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
 import java.io.BufferedWriter;
 import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
@@ -57,6 +48,8 @@
 import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.concurrent.ExecutionException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -172,6 +165,7 @@
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc;
 import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
@@ -282,6 +276,11 @@
 import org.slf4j.LoggerFactory;
 import org.stringtemplate.v4.ST;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.ListenableFuture;
+
 /**
  * DDLTask implementation.
  *
@@ -649,6 +648,11 @@ public int execute(DriverContext driverContext) {
       if (work.getDropWMTriggerDesc() != null) {
         return dropWMTrigger(db, work.getDropWMTriggerDesc());
       }
+
+      if (work.getAlterMaterializedViewDesc() != null) {
+        return alterMaterializedView(db, work.getAlterMaterializedViewDesc());
+      }
+
     } catch (Throwable e) {
       failed(e);
       return 1;
@@ -1299,6 +1303,56 @@ private int alterIndex(Hive db, AlterIndexDesc alterIndex) throws HiveException
   }
 
   /**
+   * Alters a materialized view.
+   *
+   * @param db
+   *          Database that the materialized view belongs to.
+   * @param alterMVDesc
+   *          Descriptor of the changes.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   * @throws InvalidOperationException
+   */
+  private int alterMaterializedView(Hive db, AlterMaterializedViewDesc alterMVDesc) throws HiveException {
+
+    String mvName = alterMVDesc.getMaterializedViewName();
+    Table oldMV = db.getTable(mvName);
+    Table mv = oldMV.copy(); // Do not mess with Table instance
+    EnvironmentContext environmentContext = new EnvironmentContext();
+    environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+
+    switch (alterMVDesc.getOp()) {
+    case UPDATE_REWRITE_FLAG:
+      LOG.debug("Changing rewrite flag of " + mv.getFullyQualifiedName() + " to " + alterMVDesc.isRewriteEnable());
+      LOG.debug("Previous rewrite flag value was " + mv.isRewriteEnabled());
+      if (mv.isRewriteEnabled() == alterMVDesc.isRewriteEnable()) {
+        // This is a noop, return successfully
+        return 0;
+      }
+      mv.setRewriteEnabled(alterMVDesc.isRewriteEnable());
+      break;
+
+    default:
+      throw new AssertionError("Unsupported alter materialized view type! : " + alterMVDesc.getOp());
+    }
+
+    try {
+      db.alterTable(mv, environmentContext);
+      // Remove or add to materialized view rewriting cache
+      if (alterMVDesc.isRewriteEnable()) {
+        LOG.debug("Adding materialized view " + mv.getFullyQualifiedName() + " to the rewriting registry");
+        HiveMaterializedViewsRegistry.get().addMaterializedView(mv);
+      } else {
+        LOG.debug("Removing materialized view " + oldMV.getFullyQualifiedName() + " from the rewriting registry");
+        HiveMaterializedViewsRegistry.get().dropMaterializedView(oldMV);
+      }
+    } catch (InvalidOperationException e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + mv.getFullyQualifiedName());
+    }
+    return 0;
+  }
+
+  /**
    * Add a partitions to a table.
    *
    * @param db
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 579f2df280..3a6f84b503 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -86,6 +86,8 @@
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
+import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc;
+import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc.AlterMaterializedViewTypes;
 import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
@@ -457,6 +459,16 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       }
       break;
     }
+    case HiveParser.TOK_ALTER_MATERIALIZED_VIEW: {
+      ast = (ASTNode) input.getChild(1);
+      String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0));
+      String tableName = getDotName(qualified);
+
+      if (ast.getType() == HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE) {
+        analyzeAlterMaterializedViewRewrite(tableName, ast);
+      }
+      break;
+    }
     case HiveParser.TOK_ALTERINDEX_REBUILD:
       analyzeAlterIndexRebuild(ast);
       break;
@@ -4002,4 +4014,30 @@ private HiveAuthorizationTaskFactory createAuthorizationTaskFactory(HiveConf con
     }
   }
 
+  private void analyzeAlterMaterializedViewRewrite(String mvName, ASTNode ast) throws SemanticException {
+    // Value for the flag
+    boolean enableFlag;
+    switch (ast.getChild(0).getType()) {
+      case HiveParser.TOK_REWRITE_ENABLED:
+        enableFlag = true;
+        break;
+      case HiveParser.TOK_REWRITE_DISABLED:
+        enableFlag = false;
+        break;
+      default:
+        throw new SemanticException("Invalid alter materialized view expression");
+    }
+
+    AlterMaterializedViewDesc alterMVDesc =
+        new AlterMaterializedViewDesc(AlterMaterializedViewTypes.UPDATE_REWRITE_FLAG);
+    alterMVDesc.setMaterializedViewName(mvName);
+    alterMVDesc.setRewriteEnableFlag(enableFlag);
+
+    Table tab = getTable(mvName, true);
+    inputs.add(new ReadEntity(tab));
+    outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        alterMVDesc), conf));
+  }
+
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 0bbd9be406..3663c849f2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -260,6 +260,8 @@ TOK_ALTERVIEW_DROPPARTS;
 TOK_ALTERVIEW_RENAME;
 TOK_CREATE_MATERIALIZED_VIEW;
 TOK_DROP_MATERIALIZED_VIEW;
+TOK_ALTER_MATERIALIZED_VIEW;
+TOK_ALTER_MATERIALIZED_VIEW_REWRITE;
 TOK_REWRITE_ENABLED;
 TOK_REWRITE_DISABLED;
 TOK_VIEWPARTCOLS;
@@ -1300,6 +1302,8 @@ alterStatement
 @after { popMsg(state); }
     : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix)
     | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix)
+    | KW_ALTER KW_MATERIALIZED KW_VIEW tableName alterMaterializedViewStatementSuffix
+    -> ^(TOK_ALTER_MATERIALIZED_VIEW tableName alterMaterializedViewStatementSuffix)
     | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix
     | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix
     ;
@@ -1357,6 +1361,12 @@ alterViewStatementSuffix
     | selectStatementWithCTE
     ;
 
+alterMaterializedViewStatementSuffix
+@init { pushMsg("alter materialized view statement", state); }
+@after { popMsg(state); }
+    : alterMaterializedViewSuffixRewrite
+    ;
+
 alterIndexStatementSuffix
 @init { pushMsg("alter index statement", state); }
 @after { popMsg(state); }
@@ -1524,6 +1534,13 @@ alterViewSuffixProperties
     -> ^(TOK_ALTERVIEW_DROPPROPERTIES tableProperties ifExists?)
     ;
 
+alterMaterializedViewSuffixRewrite
+@init { pushMsg("alter materialized view rewrite statement", state); }
+@after { popMsg(state); }
+    : (mvRewriteFlag=rewriteEnabled | mvRewriteFlag=rewriteDisabled)
+    -> ^(TOK_ALTER_MATERIALIZED_VIEW_REWRITE $mvRewriteFlag)
+    ;
+
 alterStatementSuffixSerdeProperties
 @init { pushMsg("alter serdes statement", state); }
 @after { popMsg(state); }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index e704c73112..3dc1faa82f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -99,6 +99,8 @@
     commandType.put(HiveParser.TOK_ALTERVIEW_DROPPARTS, HiveOperation.ALTERTABLE_DROPPARTS);
     commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
     commandType.put(HiveParser.TOK_ALTERVIEW, HiveOperation.ALTERVIEW_AS);
+    commandType.put(HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE,
+        HiveOperation.ALTER_MATERIALIZED_VIEW_REWRITE);
     commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY);
     commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE);
     commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE);
@@ -263,6 +265,18 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t
        queryState.setCommandType(HiveOperation.ALTERVIEW_AS);
        return new SemanticAnalyzer(queryState);
      }
+     case HiveParser.TOK_ALTER_MATERIALIZED_VIEW: {
+       Tree child = tree.getChild(1);
+       switch (child.getType()) {
+         case HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE:
+           opType = commandType.get(child.getType());
+           queryState.setCommandType(opType);
+           return new DDLSemanticAnalyzer(queryState);
+       }
+       // Operation not recognized, set to null and let upper level handle this case
+       queryState.setCommandType(null);
+       return new DDLSemanticAnalyzer(queryState);
+     }
      case HiveParser.TOK_CREATEDATABASE:
      case HiveParser.TOK_DROPDATABASE:
      case HiveParser.TOK_SWITCHDATABASE:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java
new file mode 100644
index 0000000000..4fcde11799
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * AlterMaterializedViewDesc.
+ */
+@Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterMaterializedViewDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  private String materializedViewName;
+  private boolean rewriteEnable;
+
+  /**
+   * alterMVTypes.
+   *
+   */
+  public static enum AlterMaterializedViewTypes {
+    UPDATE_REWRITE_FLAG
+  };
+
+  AlterMaterializedViewTypes op;
+
+  public AlterMaterializedViewDesc() {
+  }
+
+  public AlterMaterializedViewDesc(AlterMaterializedViewTypes type) {
+    this.op = type;
+  }
+
+  /**
+   * @return the name of the materializedViewName
+   */
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getMaterializedViewName() {
+    return materializedViewName;
+  }
+
+  /**
+   * @param materializedViewName
+   *          the materializedViewName to set
+   */
+  public void setMaterializedViewName(String materializedViewName) {
+    this.materializedViewName = materializedViewName;
+  }
+
+  /**
+   * @return the rewrite flag
+   */
+  public boolean isRewriteEnable() {
+    return rewriteEnable;
+  }
+
+  /**
+   * @param rewriteEnable
+   *          the value for the flag
+   */
+  public void setRewriteEnableFlag(boolean rewriteEnable) {
+    this.rewriteEnable = rewriteEnable;
+  }
+
+  /**
+   * @return the op
+   */
+  @Explain(displayName = "operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getOpString() {
+    return op.toString();
+  }
+
+  /**
+   * @return the op
+   */
+  public AlterMaterializedViewTypes getOp() {
+    return op;
+  }
+
+  /**
+   * @param op
+   *          the op to set
+   */
+  public void setOp(AlterMaterializedViewTypes op) {
+    this.op = op;
+  }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index 369f8440a2..dc8f172674 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -38,6 +38,7 @@
   private InsertTableDesc insertTableDesc;
   private CreateIndexDesc createIndexDesc;
   private AlterIndexDesc alterIndexDesc;
+  private AlterMaterializedViewDesc alterMVDesc;
   private DropIndexDesc dropIdxDesc;
   private CreateDatabaseDesc createDatabaseDesc;
   private SwitchDatabaseDesc switchDatabaseDesc;
@@ -210,6 +211,16 @@ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
   }
 
   /**
+   * @param alterMVDesc
+   *          alter materialized view descriptor
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      AlterMaterializedViewDesc alterMVDesc) {
+    this(inputs, outputs);
+    this.alterMVDesc = alterMVDesc;
+  }
+
+  /**
    * @param createTblDesc
    *          create table descriptor
   */
@@ -770,6 +781,22 @@ public void setAlterTblDesc(AlterTableDesc alterTblDesc) {
   }
 
   /**
+   * @return the alterMVDesc
+   */
+  @Explain(displayName = "Alter Materialized View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public AlterMaterializedViewDesc getAlterMaterializedViewDesc() {
+    return alterMVDesc;
+  }
+
+  /**
+   * @param alterMVDesc
+   *          the alterMVDesc to set
+   */
+  public void setAlterMVDesc(AlterMaterializedViewDesc alterMVDesc) {
+    this.alterMVDesc = alterMVDesc;
+  }
+
+  /**
    * @return the showDatabasesDesc
   */
  @Explain(displayName = "Show Databases Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 1ce1c76f37..8b5326842f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -86,6 +86,8 @@
       Privilege[]{Privilege.CREATE}),
   DROPVIEW("DROPVIEW", null, new Privilege[]{Privilege.DROP}),
   DROP_MATERIALIZED_VIEW("DROP_MATERIALIZED_VIEW", null, new Privilege[]{Privilege.DROP}),
+  ALTER_MATERIALIZED_VIEW_REWRITE("ALTER_MATERIALIZED_VIEW_REWRITE",
+      new Privilege[]{Privilege.ALTER_METADATA}, null),
   CREATEINDEX("CREATEINDEX", null, null),
   DROPINDEX("DROPINDEX", null, null),
   ALTERINDEX_REBUILD("ALTERINDEX_REBUILD", null, null),
diff --git ql/src/test/queries/clientpositive/materialized_view_create_rewrite.q ql/src/test/queries/clientpositive/materialized_view_create_rewrite.q
index b17517f76b..1749cb023a 100644
--- ql/src/test/queries/clientpositive/materialized_view_create_rewrite.q
+++ ql/src/test/queries/clientpositive/materialized_view_create_rewrite.q
@@ -30,6 +30,26 @@ select a, c from cmv_basetable where a = 3;
 
 select a, c from cmv_basetable where a = 3;
 
+alter materialized view cmv_mat_view2 disable rewrite;
+
+explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a);
+
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a);
+
+explain
+alter materialized view cmv_mat_view2 enable rewrite;
+
+alter materialized view cmv_mat_view2 enable rewrite;
+
 explain
 select * from (
   (select a, c from cmv_basetable where a = 3) table1
diff --git ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
index f6b161b690..81a7950773 100644
--- ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
+++ ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
@@ -119,6 +119,135 @@
 POSTHOOK: Input: default@cmv_mat_view2
 #### A masked pattern was here ####
 3	978.76	3	9.80
+PREHOOK: query: alter materialized view cmv_mat_view2 disable rewrite
+PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+PREHOOK: Input: default@cmv_mat_view2
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: alter materialized view cmv_mat_view2 disable rewrite
+POSTHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+POSTHOOK: Input: default@cmv_mat_view2
+POSTHOOK: Output: default@cmv_mat_view2
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (a = 3) (type: boolean)
+              Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((3 = a) and (d = 3)) (type: boolean)
+              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0
+            1
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+3	9.80	3	978.76
+3	978.76	3	978.76
+PREHOOK: query: explain
+alter materialized view cmv_mat_view2 enable rewrite
+PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+POSTHOOK: query: explain
+alter materialized view cmv_mat_view2 enable rewrite
+POSTHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Alter Materialized View Operator:
+        Alter Materialized View
+          name: default.cmv_mat_view2
+          operation: UPDATE_REWRITE_FLAG
+
+PREHOOK: query: alter materialized view cmv_mat_view2 enable rewrite
+PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+PREHOOK: Input: default@cmv_mat_view2
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: alter materialized view cmv_mat_view2 enable rewrite
+POSTHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
+POSTHOOK: Input: default@cmv_mat_view2
+POSTHOOK: Output: default@cmv_mat_view2
 Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: explain
 select * from (
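
Usage sketch, mirroring the new clientpositive test above (cmv_mat_view2 is the materialized view created earlier in materialized_view_create_rewrite.q):

    -- stop considering the materialized view for automatic query rewriting
    alter materialized view cmv_mat_view2 disable rewrite;

    -- make the materialized view available to the rewriting optimizer again
    alter materialized view cmv_mat_view2 enable rewrite;

Per the DDLTask change, disabling removes the view from HiveMaterializedViewsRegistry and enabling adds it back, so the flag takes effect without recreating the view.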