diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java new file mode 100644 index 0000000000..f0afccacb5 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.alter; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for all the ALTER MATERIALIZED VIEW commands. + */ +@Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +abstract class AlterMaterializedViewDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + /** + * Alter Materialized View Types. + */ + enum AlterMaterializedViewTypes { + UPDATE_REWRITE_FLAG + }; + + private AlterMaterializedViewTypes op; + + AlterMaterializedViewDesc(AlterMaterializedViewTypes type) { + this.op = type; + } + + @Explain(displayName = "operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getOpString() { + return op.toString(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java similarity index 52% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java index 865d1431d1..72cc84c5cd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java @@ -16,97 +16,45 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; +package org.apache.hadoop.hive.ql.ddl.alter; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; /** - * AlterMaterializedViewDesc. + * DDL task description for the ALTER MATERIALIZED VIEW commands. 
*/ @Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class AlterMaterializedViewDesc extends DDLDesc implements Serializable, DDLDescWithWriteId { +public class AlterMaterializedViewRewriteDesc extends AlterMaterializedViewDesc implements DDLDescWithWriteId { private static final long serialVersionUID = 1L; - private String fqMaterializedViewName; - private boolean rewriteEnable; - - /** - * alterMVTypes. - * - */ - public static enum AlterMaterializedViewTypes { - UPDATE_REWRITE_FLAG - }; - - AlterMaterializedViewTypes op; - private long writeId; - public AlterMaterializedViewDesc() { + static { + DDLTask2.registerOperation(AlterMaterializedViewRewriteDesc.class, AlterMaterializedViewRewriteOperation.class); } - public AlterMaterializedViewDesc(AlterMaterializedViewTypes type) { - this.op = type; + private final String fqMaterializedViewName; + private final boolean rewriteEnable; + + public AlterMaterializedViewRewriteDesc(String fqMaterializedViewName, boolean rewriteEnable) { + super(AlterMaterializedViewTypes.UPDATE_REWRITE_FLAG); + this.fqMaterializedViewName = fqMaterializedViewName; + this.rewriteEnable = rewriteEnable; } - /** - * @return the name of the materializedViewName - */ @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getMaterializedViewName() { return fqMaterializedViewName; } - /** - * @param materializedViewName - * the materializedViewName to set - */ - public void setFqMaterializedViewName(String materializedViewName) { - this.fqMaterializedViewName = materializedViewName; - } - - /** - * @return the rewrite flag - */ public boolean isRewriteEnable() { return rewriteEnable; } - /** - * @param rewriteEnable - * the value for the flag - */ - public void setRewriteEnableFlag(boolean rewriteEnable) { - this.rewriteEnable = rewriteEnable; - } - - /** - * @return the op - */ - @Explain(displayName = "operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getOpString() { - return op.toString(); - } - - /** - * @return the op - */ - public AlterMaterializedViewTypes getOp() { - return op; - } - - /** - * @param op - * the op to set - */ - public void setOp(AlterMaterializedViewTypes op) { - this.op = op; - } - @Override public void setWriteId(long writeId) { - this.writeId = writeId; + // We don't actually need the write id, but by implementing DDLDescWithWriteId it ensures that it is allocated } @Override @@ -118,5 +66,4 @@ public String getFullTableName() { public boolean mayNeedWriteId() { return true; // Verified when this is set as DDL Desc for ACID. } - } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java new file mode 100644 index 0000000000..379f472452 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.alter; + +import org.apache.calcite.rel.RelNode; +import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.metastore.api.EnvironmentContext; +import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.CalcitePlanner; +import org.apache.hadoop.hive.ql.parse.ParseUtils; + +/** + * Operation process of enabling/disabling materialized view rewrite. + */ +public class AlterMaterializedViewRewriteOperation extends DDLOperation { + private final AlterMaterializedViewRewriteDesc desc; + + public AlterMaterializedViewRewriteOperation(DDLOperationContext context, AlterMaterializedViewRewriteDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Table mv = context.getDb().getTable(desc.getMaterializedViewName()); + if (mv.isRewriteEnabled() == desc.isRewriteEnable()) { + // This is a noop, return successfully + return 0; + } + Table newMV = mv.copy(); // Do not mess with Table instance + + if (desc.isRewriteEnable()) { + try { + QueryState qs = new QueryState.Builder().withHiveConf(context.getConf()).build(); + CalcitePlanner planner = new CalcitePlanner(qs); + Context ctx = new Context(context.getConf()); + ctx.setIsLoadingMaterializedView(true); + planner.initCtx(ctx); + planner.init(false); + + RelNode plan = planner.genLogicalPlan(ParseUtils.parse(newMV.getViewExpandedText())); + if (plan == null) { + String msg = "Cannot enable automatic rewriting for materialized view."; + if (ctx.getCboInfo() != null) { + msg += " " + ctx.getCboInfo(); + } + throw new HiveException(msg); + } + if (!planner.isValidAutomaticRewritingMaterialization()) { + throw new HiveException("Cannot enable rewriting for materialized view. " + + planner.getInvalidAutomaticRewritingMaterializationReason()); + } + } catch (Exception e) { + throw new HiveException(e); + } + } + + newMV.setRewriteEnabled(desc.isRewriteEnable()); + EnvironmentContext environmentContext = new EnvironmentContext(); + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); + context.getDb().alterTable(newMV, false, environmentContext, true); + + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/package-info.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/package-info.java new file mode 100644 index 0000000000..b113f39db8 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Alter DDL operation descriptions and operations. */ +package org.apache.hadoop.hive.ql.ddl.alter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java similarity index 87% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java index b693fdb845..ebc7e00b7d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.table; import java.io.Serializable; import java.util.List; @@ -28,38 +28,44 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.exec.DDLTask; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; +import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - /** - * CreateViewDesc. - * + * DDL task description for CREATE VIEW commands. */ @Explain(displayName = "Create View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class CreateViewDesc extends DDLDesc implements Serializable { +public class CreateViewDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - private static Logger LOG = LoggerFactory.getLogger(CreateViewDesc.class); + private static final Logger LOG = LoggerFactory.getLogger(CreateViewDesc.class); + + static { + DDLTask2.registerOperation(CreateViewDesc.class, CreateViewOperation.class); + } private String viewName; - private String originalText; - private String expandedText; - private boolean rewriteEnabled; private List schema; + private String comment; private Map tblProps; private List partColNames; - private List partCols; - private String comment; private boolean ifNotExists; - private boolean replace; + private boolean orReplace; + + private String originalText; + private String expandedText; + private boolean rewriteEnabled; + private List partCols; private boolean isAlterViewAs; private boolean isMaterialized; private String inputFormat; @@ -73,40 +79,20 @@ private String ownerName = null; /** - * For serialization only. 
- */ - public CreateViewDesc() { - } - - /** - * Used to create a materialized view descriptor - * @param viewName - * @param schema - * @param comment - * @param tblProps - * @param partColNames - * @param ifNotExists - * @param replace - * @param isAlterViewAs - * @param inputFormat - * @param outputFormat - * @param location - * @param serde - * @param storageHandler - * @param serdeProps + * Used to create a materialized view descriptor. */ - public CreateViewDesc(String viewName, List schema, String comment, - Map tblProps, List partColNames, - boolean ifNotExists, boolean replace, boolean rewriteEnabled, boolean isAlterViewAs, + public CreateViewDesc(String viewName, List schema, String comment, Map tblProps, + List partColNames, boolean ifNotExists, boolean orReplace, boolean rewriteEnabled, boolean isAlterViewAs, String inputFormat, String outputFormat, String location, String serde, String storageHandler, Map serdeProps) { this.viewName = viewName; this.schema = schema; + this.comment = comment; this.tblProps = tblProps; this.partColNames = partColNames; - this.comment = comment; this.ifNotExists = ifNotExists; - this.replace = replace; + this.orReplace = orReplace; + this.isMaterialized = true; this.rewriteEnabled = rewriteEnabled; this.isAlterViewAs = isAlterViewAs; @@ -119,30 +105,20 @@ public CreateViewDesc(String viewName, List schema, String comment, } /** - * Used to create a view descriptor - * @param viewName - * @param schema - * @param comment - * @param tblProps - * @param partColNames - * @param ifNotExists - * @param orReplace - * @param isAlterViewAs - * @param inputFormat - * @param outputFormat - * @param serde + * Used to create a view descriptor. */ - public CreateViewDesc(String viewName, List schema, String comment, - Map tblProps, List partColNames, - boolean ifNotExists, boolean orReplace, boolean isAlterViewAs, + public CreateViewDesc(String viewName, List schema, String comment, Map tblProps, + List partColNames, boolean ifNotExists, boolean orReplace, + boolean isAlterViewAs, String inputFormat, String outputFormat, String serde) { this.viewName = viewName; this.schema = schema; + this.comment = comment; this.tblProps = tblProps; this.partColNames = partColNames; - this.comment = comment; this.ifNotExists = ifNotExists; - this.replace = orReplace; + this.orReplace = orReplace; + this.isAlterViewAs = isAlterViewAs; this.isMaterialized = false; this.rewriteEnabled = false; @@ -258,11 +234,11 @@ public void setTablesUsed(Set tablesUsed) { @Explain(displayName = "replace", displayOnlyOnTrue = true) public boolean isReplace() { - return replace; + return orReplace; } public void setReplace(boolean replace) { - this.replace = replace; + this.orReplace = replace; } @Explain(displayName = "is alter view as select", displayOnlyOnTrue = true) @@ -386,12 +362,10 @@ public Table toTable(HiveConf conf) throws HiveException { if (getSerde() == null) { if (storageHandler == null) { serDeClassName = PlanUtils.getDefaultSerDe().getName(); - LOG.info("Default to {} for materialized view {}", serDeClassName, - getViewName()); + LOG.info("Default to {} for materialized view {}", serDeClassName, getViewName()); } else { serDeClassName = storageHandler.getSerDeClass().getName(); - LOG.info("Use StorageHandler-supplied {} for materialized view {}", - serDeClassName, getViewName()); + LOG.info("Use StorageHandler-supplied {} for materialized view {}", serDeClassName, getViewName()); } } else { // let's validate that the serde exists diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java new file mode 100644 index 0000000000..bb9b18977b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.ValidTxnWriteIdList; +import org.apache.hadoop.hive.metastore.api.CreationMetadata; +import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; + +import com.google.common.collect.ImmutableSet; + +/** + * Operation process of creating a view. + */ +public class CreateViewOperation extends DDLOperation { + private final CreateViewDesc desc; + + public CreateViewOperation(DDLOperationContext context, CreateViewDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Table oldview = context.getDb().getTable(desc.getViewName(), false); + if (oldview != null) { + // Check whether we are replicating + if (desc.getReplicationSpec().isInReplicationScope()) { + // if this is a replication spec, then replace-mode semantics might apply. + if (desc.getReplicationSpec().allowEventReplacementInto(oldview.getParameters())){ + desc.setReplace(true); // we replace existing view. + } else { + LOG.debug("DDLTask: Create View is skipped as view {} is newer than update", + desc.getViewName()); // no replacement, the existing table state is newer than our update. 
+ return 0; + } + } + + if (!desc.isReplace() && !desc.getIfNotExists()) { + // View already exists, thus we should be replacing + throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(desc.getViewName())); + } + + // It should not be a materialized view + assert !desc.isMaterialized(); + + // replace existing view + // remove the existing partition columns from the field schema + oldview.setViewOriginalText(desc.getViewOriginalText()); + oldview.setViewExpandedText(desc.getViewExpandedText()); + oldview.setFields(desc.getSchema()); + if (desc.getComment() != null) { + oldview.setProperty("comment", desc.getComment()); + } + if (desc.getTblProps() != null) { + oldview.getTTable().getParameters().putAll(desc.getTblProps()); + } + oldview.setPartCols(desc.getPartCols()); + if (desc.getInputFormat() != null) { + oldview.setInputFormatClass(desc.getInputFormat()); + } + if (desc.getOutputFormat() != null) { + oldview.setOutputFormatClass(desc.getOutputFormat()); + } + oldview.checkValidity(null); + if (desc.getOwnerName() != null) { + oldview.setOwner(desc.getOwnerName()); + } + context.getDb().alterTable(desc.getViewName(), oldview, false, null, true); + DDLUtils.addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK), + context.getWork().getOutputs()); + } else { + // We create new view + Table tbl = desc.toTable(context.getConf()); + // We set the signature for the view if it is a materialized view + if (tbl.isMaterializedView()) { + CreationMetadata cm = + new CreationMetadata(MetaStoreUtils.getDefaultCatalog(context.getConf()), tbl.getDbName(), + tbl.getTableName(), ImmutableSet.copyOf(desc.getTablesUsed())); + cm.setValidTxnList(context.getConf().get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY)); + tbl.getTTable().setCreationMetadata(cm); + } + context.getDb().createTable(tbl, desc.getIfNotExists()); + DDLUtils.addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK), + context.getWork().getOutputs()); + + //set lineage info + DataContainer dc = new DataContainer(tbl.getTTable()); + context.getQueryState().getLineageState().setLineage(new Path(desc.getViewName()), dc, tbl.getCols()); + } + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index f4281bdd7b..ed797fc5dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -42,11 +42,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; -import org.apache.calcite.rel.RelNode; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -54,7 +52,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; -import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.DefaultHiveMetaHook; @@ -66,7 +63,6 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.CompactionResponse; -import org.apache.hadoop.hive.metastore.api.CreationMetadata; import 
org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -92,7 +88,6 @@ import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.txn.TxnStore; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; @@ -104,7 +99,6 @@ import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager; import org.apache.hadoop.hive.ql.exec.tez.TezTask; import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.AcidUtils; @@ -131,16 +125,13 @@ import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; -import org.apache.hadoop.hive.ql.parse.CalcitePlanner; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; -import org.apache.hadoop.hive.ql.parse.ParseUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.dump.Utils; import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; @@ -153,7 +144,6 @@ import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc; import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc; import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; @@ -308,11 +298,6 @@ public int execute(DriverContext driverContext) { } } - CreateViewDesc crtView = work.getCreateViewDesc(); - if (crtView != null) { - return createView(db, crtView); - } - AddPartitionDesc addPartitionDesc = work.getAddPartitionDesc(); if (addPartitionDesc != null) { return addPartitions(db, addPartitionDesc); @@ -493,10 +478,6 @@ public int execute(DriverContext driverContext) { return createOrDropTriggerToPoolMapping(db, work.getTriggerToPoolMappingDesc()); } - if (work.getAlterMaterializedViewDesc() != null) { - return alterMaterializedView(db, work.getAlterMaterializedViewDesc()); - } - if (work.getReplSetFirstIncLoadFlagDesc() != null) { return remFirstIncPendFlag(db, work.getReplSetFirstIncLoadFlagDesc()); } @@ -999,68 +980,6 @@ private void writeListToFileAfterSort(List entries, String resFile) thro writeToFile(sb.toString(), resFile); } - /** - * Alters a materialized view. - * - * @param db - * Database that the materialized view belongs to. - * @param alterMVDesc - * Descriptor of the changes. - * @return Returns 0 when execution succeeds and above 0 if it fails. 
- * @throws HiveException - * @throws InvalidOperationException - */ - private int alterMaterializedView(Hive db, AlterMaterializedViewDesc alterMVDesc) throws HiveException { - String mvName = alterMVDesc.getMaterializedViewName(); - // It can be fully qualified name or use default database - Table oldMV = db.getTable(mvName); - Table mv = oldMV.copy(); // Do not mess with Table instance - EnvironmentContext environmentContext = new EnvironmentContext(); - environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); - - switch (alterMVDesc.getOp()) { - case UPDATE_REWRITE_FLAG: - if (mv.isRewriteEnabled() == alterMVDesc.isRewriteEnable()) { - // This is a noop, return successfully - return 0; - } - if (alterMVDesc.isRewriteEnable()) { - try { - final QueryState qs = - new QueryState.Builder().withHiveConf(conf).build(); - final CalcitePlanner planner = new CalcitePlanner(qs); - final Context ctx = new Context(conf); - ctx.setIsLoadingMaterializedView(true); - planner.initCtx(ctx); - planner.init(false); - final RelNode plan = planner.genLogicalPlan(ParseUtils.parse(mv.getViewExpandedText())); - if (plan == null) { - String msg = "Cannot enable automatic rewriting for materialized view."; - if (ctx.getCboInfo() != null) { - msg += " " + ctx.getCboInfo(); - } - throw new HiveException(msg); - } - if (!planner.isValidAutomaticRewritingMaterialization()) { - throw new HiveException("Cannot enable rewriting for materialized view. " + - planner.getInvalidAutomaticRewritingMaterializationReason()); - } - } catch (Exception e) { - throw new HiveException(e); - } - } - mv.setRewriteEnabled(alterMVDesc.isRewriteEnable()); - break; - - default: - throw new AssertionError("Unsupported alter materialized view type! : " + alterMVDesc.getOp()); - } - - db.alterTable(mv, false, environmentContext, true); - - return 0; - } - /** * Add a partitions to a table. * @@ -3424,85 +3343,6 @@ public static void validateSerDe(String serdeName, HiveConf conf) throws HiveExc } } - /** - * Create a new view. - * - * @param db - * The database in question. - * @param crtView - * This is the view we're creating. - * @return Returns 0 when execution succeeds and above 0 if it fails. - * @throws HiveException - * Throws this exception if an unexpected error occurs. - */ - private int createView(Hive db, CreateViewDesc crtView) throws HiveException { - Table oldview = db.getTable(crtView.getViewName(), false); - if (oldview != null) { - // Check whether we are replicating - if (crtView.getReplicationSpec().isInReplicationScope()) { - // if this is a replication spec, then replace-mode semantics might apply. - if (crtView.getReplicationSpec().allowEventReplacementInto(oldview.getParameters())){ - crtView.setReplace(true); // we replace existing view. - } else { - LOG.debug("DDLTask: Create View is skipped as view {} is newer than update", - crtView.getViewName()); // no replacement, the existing table state is newer than our update. 
- return 0; - } - } - - if (!crtView.isReplace() && !crtView.getIfNotExists()) { - // View already exists, thus we should be replacing - throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(crtView.getViewName())); - } - - // It should not be a materialized view - assert !crtView.isMaterialized(); - - // replace existing view - // remove the existing partition columns from the field schema - oldview.setViewOriginalText(crtView.getViewOriginalText()); - oldview.setViewExpandedText(crtView.getViewExpandedText()); - oldview.setFields(crtView.getSchema()); - if (crtView.getComment() != null) { - oldview.setProperty("comment", crtView.getComment()); - } - if (crtView.getTblProps() != null) { - oldview.getTTable().getParameters().putAll(crtView.getTblProps()); - } - oldview.setPartCols(crtView.getPartCols()); - if (crtView.getInputFormat() != null) { - oldview.setInputFormatClass(crtView.getInputFormat()); - } - if (crtView.getOutputFormat() != null) { - oldview.setOutputFormatClass(crtView.getOutputFormat()); - } - oldview.checkValidity(null); - if (crtView.getOwnerName() != null) { - oldview.setOwner(crtView.getOwnerName()); - } - db.alterTable(crtView.getViewName(), oldview, false, null, true); - addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK)); - } else { - // We create new view - Table tbl = crtView.toTable(conf); - // We set the signature for the view if it is a materialized view - if (tbl.isMaterializedView()) { - CreationMetadata cm = - new CreationMetadata(MetaStoreUtils.getDefaultCatalog(conf), tbl.getDbName(), - tbl.getTableName(), ImmutableSet.copyOf(crtView.getTablesUsed())); - cm.setValidTxnList(conf.get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY)); - tbl.getTTable().setCreationMetadata(cm); - } - db.createTable(tbl, crtView.getIfNotExists()); - addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK)); - - //set lineage info - DataContainer dc = new DataContainer(tbl.getTTable()); - queryState.getLineageState().setLineage(new Path(crtView.getViewName()), dc, tbl.getCols()); - } - return 0; - } - private int exchangeTablePartition(Hive db, AlterTableExchangePartition exchangePartition) throws HiveException { Map partitionSpecs = exchangePartition.getPartitionSpecs(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index baf635633d..0828fade5f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -71,6 +71,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteDesc; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc; @@ -120,8 +121,6 @@ import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc.AlterMaterializedViewTypes; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc; import 
org.apache.hadoop.hive.ql.plan.AlterTableDesc; @@ -4398,10 +4397,7 @@ private void analyzeAlterMaterializedViewRewrite(String fqMvName, ASTNode ast) t throw new SemanticException("Invalid alter materialized view expression"); } - AlterMaterializedViewDesc alterMVDesc = - new AlterMaterializedViewDesc(AlterMaterializedViewTypes.UPDATE_REWRITE_FLAG); - alterMVDesc.setFqMaterializedViewName(fqMvName); - alterMVDesc.setRewriteEnableFlag(enableFlag); + AlterMaterializedViewRewriteDesc alterMVRewriteDesc = new AlterMaterializedViewRewriteDesc(fqMvName, enableFlag); // It can be fully qualified name or use default database Table materializedViewTable = getTable(fqMvName, true); @@ -4419,13 +4415,12 @@ private void analyzeAlterMaterializedViewRewrite(String fqMvName, ASTNode ast) t } if (AcidUtils.isTransactionalTable(materializedViewTable)) { - setAcidDdlDesc(alterMVDesc); + setAcidDdlDesc(alterMVRewriteDesc); } inputs.add(new ReadEntity(materializedViewTable)); outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterMVDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterMVRewriteDesc))); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java index 77e181863e..c4e6e5cc53 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.QueryProperties; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -44,7 +45,6 @@ import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc.SampleDesc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java index 0405ee8f02..4c1e2a27cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java @@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; /** * Implementation of the query block. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 1e79f325f8..b4b5ebd374 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -102,6 +102,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; import org.apache.hadoop.hive.ql.ddl.table.CreateTableLikeDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.ddl.table.PreInsertTableDesc; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; @@ -195,7 +196,6 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; @@ -13750,8 +13750,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt dbDotTable, cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - createVwDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createVwDesc))); addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 0b6ff524b1..67d27cdd87 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -32,9 +32,12 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteDesc; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; -import org.apache.hadoop.hive.ql.exec.DDLTask; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.MaterializedViewDesc; @@ -61,8 +64,6 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec; import org.apache.hadoop.hive.ql.plan.BasicStatsWork; import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.LoadFileDesc; @@ -118,7 +119,7 @@ public void init(QueryState queryState, LogHelper console, Hive db) { this.console = console; } - @SuppressWarnings({"nls", "unchecked"}) + @SuppressWarnings("nls") public void compile(final ParseContext pCtx, final List> rootTasks, final HashSet inputs, final HashSet outputs) throws 
SemanticException { @@ -364,7 +365,7 @@ public void compile(final ParseContext pCtx, } else if (pCtx.getQueryProperties().isMaterializedView()) { // generate a DDL task and make it a dependent task of the leaf CreateViewDesc viewDesc = pCtx.getCreateViewDesc(); - Task crtViewTask = TaskFactory.get(new DDLWork( + Task crtViewTask = TaskFactory.get(new DDLWork2( inputs, outputs, viewDesc)); patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtViewTask, CollectionUtils.isEmpty(viewDesc.getPartColNames())); } else if (pCtx.getMaterializedViewUpdateDesc() != null) { @@ -544,28 +545,26 @@ private void patchUpAfterCTASorMaterializedView(final List dependentTask, Task loadFileWork, Map map, int outerQueryLimit, int numBitVector) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index 2b653a5d21..73acc31a27 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -34,8 +34,6 @@ // TODO: this can probably be replaced with much less code via dynamic dispatch and/or templates. private InsertCommitHookDesc insertCommitHookDesc; - private AlterMaterializedViewDesc alterMVDesc; - private CreateViewDesc createVwDesc; private DropPartitionDesc dropPartitionDesc; private AlterTableDesc alterTblDesc; private ShowColumnsDesc showColumnsDesc; @@ -118,27 +116,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.alterTblDesc = alterTblDesc; } - /** - * @param alterMVDesc - * alter materialized view descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - AlterMaterializedViewDesc alterMVDesc) { - this(inputs, outputs); - this.alterMVDesc = alterMVDesc; - } - - /** - * @param createVwDesc - * create view descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - CreateViewDesc createVwDesc) { - this(inputs, outputs); - - this.createVwDesc = createVwDesc; - } - /** * @param dropTblDesc * drop table descriptor @@ -404,14 +381,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.replSetFirstIncLoadFlagDesc = replSetFirstIncLoadFlagDesc; } - /** - * @return the createTblDesc - */ - @Explain(displayName = "Create View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public CreateViewDesc getCreateViewDesc() { - return createVwDesc; - } - /** * @return the dropTblDesc */ @@ -428,15 +397,6 @@ public AlterTableDesc getAlterTblDesc() { return alterTblDesc; } - - /** - * @return the alterMVDesc - */ - @Explain(displayName = "Alter Materialized View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public AlterMaterializedViewDesc getAlterMaterializedViewDesc() { - return alterMVDesc; - } - /** * @return the showColumnsDesc */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 381c3b54a5..8db5d33868 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -327,7 +328,7 @@ public String 
getDatabaseName() { case TABLE: return TaskFactory.get(new DDLWork2(inputs, outputs, createTblDesc), conf); case VIEW: - return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf); + return TaskFactory.get(new DDLWork2(inputs, outputs, createViewDesc), conf); } return null; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java index 3abdc4859f..5d4e93e74a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java @@ -22,6 +22,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.AcidUtils; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index b668e40594..15e922ebd5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.TableScanOperator; diff --git a/ql/src/test/results/clientnegative/create_view_failure1.q.out b/ql/src/test/results/clientnegative/create_view_failure1.q.out index 2b9a324c06..98927e0f0e 100644 --- a/ql/src/test/results/clientnegative/create_view_failure1.q.out +++ b/ql/src/test/results/clientnegative/create_view_failure1.q.out @@ -15,4 +15,4 @@ PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default PREHOOK: Output: default@xxx12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Table already exists: default.xxx12 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Table already exists: default.xxx12 diff --git a/ql/src/test/results/clientnegative/create_view_failure4.q.out b/ql/src/test/results/clientnegative/create_view_failure4.q.out index 300715d4cf..d9dd837fb2 100644 --- a/ql/src/test/results/clientnegative/create_view_failure4.q.out +++ b/ql/src/test/results/clientnegative/create_view_failure4.q.out @@ -8,4 +8,4 @@ PREHOOK: type: CREATEVIEW PREHOOK: Input: default@src PREHOOK: Output: database:default PREHOOK: Output: default@xxx5 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition. 
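Note on the mechanism behind the changes above: the new-style descriptors (AlterMaterializedViewRewriteDesc, CreateViewDesc) each bind themselves to their operation class through DDLTask2.registerOperation(...) in a static initializer, which is what lets this patch delete the corresponding branches from DDLTask.execute(). DDLTask2 itself is not part of this diff, so the following is only a minimal sketch of the registry those calls could be feeding: registerOperation mirrors the signature used in the descriptors, while the class name DdlOperationRegistry and the operationFor helper are assumptions made for illustration.

package org.apache.hadoop.hive.ql.ddl;

import java.lang.reflect.Constructor;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.hadoop.hive.ql.metadata.HiveException;

/** Illustrative stand-in for the registry inside DDLTask2; not actual Hive source. */
public final class DdlOperationRegistry {
  // Maps a concrete descriptor type to the operation class that executes it.
  private static final Map<Class<? extends DDLDesc>, Class<? extends DDLOperation>> OPERATIONS =
      new ConcurrentHashMap<>();

  private DdlOperationRegistry() {
  }

  /** Called from the static initializer of each new-style DDLDesc, as in the descriptors above. */
  public static void registerOperation(Class<? extends DDLDesc> descClass,
      Class<? extends DDLOperation> operationClass) {
    OPERATIONS.put(descClass, operationClass);
  }

  /** Instantiates the registered operation through its (context, desc) constructor. */
  public static DDLOperation operationFor(DDLOperationContext context, DDLDesc desc)
      throws HiveException {
    Class<? extends DDLOperation> operationClass = OPERATIONS.get(desc.getClass());
    if (operationClass == null) {
      throw new HiveException("No DDL operation registered for " + desc.getClass().getName());
    }
    try {
      // Every operation in this patch exposes exactly this constructor shape.
      Constructor<? extends DDLOperation> constructor =
          operationClass.getConstructor(DDLOperationContext.class, desc.getClass());
      return constructor.newInstance(context, desc);
    } catch (ReflectiveOperationException e) {
      throw new HiveException(e);
    }
  }
}

Keying the map by the concrete descriptor class and instantiating operations reflectively through their (context, desc) constructor keeps dispatch free of the long if-chain removed from DDLTask.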
diff --git a/ql/src/test/results/clientnegative/masking_mv.q.out b/ql/src/test/results/clientnegative/masking_mv.q.out index 6d5a46982e..1e18b36f7d 100644 --- a/ql/src/test/results/clientnegative/masking_mv.q.out +++ b/ql/src/test/results/clientnegative/masking_mv.q.out @@ -101,13 +101,12 @@ STAGE PLANS: #### A masked pattern was here #### Stage: Stage-8 - Create View Operator: - Create View - columns: key int - expanded text: select `masking_test_n_mv`.`key` from `default`.`masking_test_n_mv` - name: default.masking_test_view_n_mv - original text: select key from masking_test_n_mv - rewrite enabled: true + Create View + columns: key int + expanded text: select `masking_test_n_mv`.`key` from `default`.`masking_test_n_mv` + name: default.masking_test_view_n_mv + original text: select key from masking_test_n_mv + rewrite enabled: true Stage: Stage-2 Stats Work diff --git a/ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out b/ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out index b04004b0d2..ee946ae8f0 100644 --- a/ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out +++ b/ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out @@ -33,4 +33,4 @@ PREHOOK: query: alter materialized view cmv_mat_view enable rewrite PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE PREHOOK: Input: default@cmv_mat_view PREHOOK: Output: default@cmv_mat_view -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable automatic rewriting for materialized view. Plan not optimized by CBO because the statement has sort by +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable automatic rewriting for materialized view. Plan not optimized by CBO because the statement has sort by diff --git a/ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out b/ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out index 0b72e3cb83..7a9ca99fc0 100644 --- a/ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out +++ b/ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out @@ -34,4 +34,4 @@ PREHOOK: query: alter materialized view cmv_mat_view enable rewrite PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE PREHOOK: Input: default@cmv_mat_view PREHOOK: Output: default@cmv_mat_view -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable rewriting for materialized view. LEFT join type is not supported by rewriting algorithm. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable rewriting for materialized view. LEFT join type is not supported by rewriting algorithm. 
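As a usage illustration of the same convention, a hypothetical descriptor/operation pair could plug in as sketched below, using the registry outlined earlier. DummyDesc and DummyOperation are invented names that do not exist in Hive, and DDLDesc is assumed to be a marker interface, as CreateViewDesc's declaration suggests.

package org.apache.hadoop.hive.ql.ddl;

import java.io.Serializable;

import org.apache.hadoop.hive.ql.metadata.HiveException;

/** Invented example descriptor; shows the self-registration convention only. */
class DummyDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  static {
    // Same shape as the registerOperation calls in the descriptors in this patch.
    DdlOperationRegistry.registerOperation(DummyDesc.class, DummyOperation.class);
  }
}

/** Invented example operation; executes DummyDesc. */
class DummyOperation extends DDLOperation {
  // Kept as a field to mirror the real operations, which read their settings from the desc.
  private final DummyDesc desc;

  DummyOperation(DDLOperationContext context, DummyDesc desc) {
    super(context);
    this.desc = desc;
  }

  @Override
  public int execute() throws HiveException {
    return 0; // 0 signals success, matching the operations in this patch
  }
}

One caveat of this self-registration scheme is that the static initializer only runs once the descriptor class is loaded, which happens naturally when the analyzer constructs the descriptor.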
diff --git a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
index 37cdb0af43..74ef098cf6 100644
--- a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
+++ b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
@@ -263,10 +263,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Materialized View Operator:
-        Alter Materialized View
-          name: default.cmv_mat_view2_n4
-          operation: UPDATE_REWRITE_FLAG
+      Alter Materialized View
+        name: default.cmv_mat_view2_n4
+        operation: UPDATE_REWRITE_FLAG
 
 PREHOOK: query: alter materialized view cmv_mat_view2_n4 enable rewrite
 PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
diff --git a/ql/src/test/results/clientpositive/create_view.q.out b/ql/src/test/results/clientpositive/create_view.q.out
index 081ac6f9d3..f3d7520e1c 100644
--- a/ql/src/test/results/clientpositive/create_view.q.out
+++ b/ql/src/test/results/clientpositive/create_view.q.out
@@ -172,12 +172,11 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: valoo string
-          expanded text: SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `default`.`src` WHERE `src`.`key`=86) `view0`
-          name: default.view0
-          original text: SELECT upper(value) FROM src WHERE key=86
+      Create View
+        columns: valoo string
+        expanded text: SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `default`.`src` WHERE `src`.`key`=86) `view0`
+        name: default.view0
+        original text: SELECT upper(value) FROM src WHERE key=86
 
 PREHOOK: query: EXPLAIN
 SELECT * from view2 where key=18
diff --git a/ql/src/test/results/clientpositive/create_view_translate.q.out b/ql/src/test/results/clientpositive/create_view_translate.q.out
index 34ffb33bbf..b5d464e716 100644
--- a/ql/src/test/results/clientpositive/create_view_translate.q.out
+++ b/ql/src/test/results/clientpositive/create_view_translate.q.out
@@ -139,12 +139,11 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: id int, _c1 string
-          expanded text: SELECT `items`.`id`, `items`.`info`['price'] FROM `default`.`items`
-          name: default.priceview
-          original text: SELECT items.id, items.info['price'] FROM items
+      Create View
+        columns: id int, _c1 string
+        expanded text: SELECT `items`.`id`, `items`.`info`['price'] FROM `default`.`items`
+        name: default.priceview
+        original text: SELECT items.id, items.info['price'] FROM items
 
 PREHOOK: query: CREATE VIEW priceview AS SELECT items.id, items.info['price'] FROM items
 PREHOOK: type: CREATEVIEW
diff --git a/ql/src/test/results/clientpositive/explain_ddl.q.out b/ql/src/test/results/clientpositive/explain_ddl.q.out
index c52eda11d7..c53ffae800 100644
--- a/ql/src/test/results/clientpositive/explain_ddl.q.out
+++ b/ql/src/test/results/clientpositive/explain_ddl.q.out
@@ -577,12 +577,11 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: key string, value string
-          expanded text: select `m1`.`key`, `m1`.`value` from `default`.`M1`
-          name: default.V1_n0
-          original text: select * from M1
+      Create View
+        columns: key string, value string
+        expanded text: select `m1`.`key`, `m1`.`value` from `default`.`M1`
+        name: default.V1_n0
+        original text: select * from M1
 
 PREHOOK: query: EXPLAIN CREATE TABLE M1 LIKE src
 PREHOOK: type: CREATETABLE
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 1ea8fdcbb2..c9b2405abc 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -5445,11 +5445,7 @@ POSTHOOK: Output: default@mfgr_price_view_n3
 Plan optimized by CBO.
 
 Stage-1
-  Create View Operator:
-    name:default.mfgr_price_view_n3,original text:select p_mfgr, p_brand, 
-sum(p_retailprice) as s 
-from part 
-group by p_mfgr, p_brand
+  Create View{"name:":"default.mfgr_price_view_n3","original text:":"select p_mfgr, p_brand, \nsum(p_retailprice) as s \nfrom part \ngroup by p_mfgr, p_brand"}
 
 PREHOOK: query: CREATE TABLE part_4_n1(
 p_mfgr STRING,
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
index 6bf9ee5e54..0fc64bbb64 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
@@ -275,10 +275,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Materialized View Operator:
-        Alter Materialized View
-          name: default.cmv_mat_view2_n4
-          operation: UPDATE_REWRITE_FLAG
+      Alter Materialized View
+        name: default.cmv_mat_view2_n4
+        operation: UPDATE_REWRITE_FLAG
 
 PREHOOK: query: alter materialized view cmv_mat_view2_n4 enable rewrite
 PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
index e0efe3cc1e..0c66bba6c5 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_3.q.out
@@ -205,19 +205,18 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          columns: a int, c decimal(10,2)
-          expanded text: SELECT `cmv_basetable`.`a`, `cmv_basetable_2`.`c`
+      Create View
+        columns: a int, c decimal(10,2)
+        expanded text: SELECT `cmv_basetable`.`a`, `cmv_basetable_2`.`c`
 FROM `default`.`cmv_basetable` JOIN `default`.`cmv_basetable_2` ON (`cmv_basetable`.`a` = `cmv_basetable_2`.`a`)
 WHERE `cmv_basetable_2`.`c` > 10.0
 GROUP BY `cmv_basetable`.`a`, `cmv_basetable_2`.`c`
-          name: default.cmv_mat_view
-          original text: SELECT cmv_basetable.a, cmv_basetable_2.c
+        name: default.cmv_mat_view
+        original text: SELECT cmv_basetable.a, cmv_basetable_2.c
 FROM cmv_basetable JOIN cmv_basetable_2 ON (cmv_basetable.a = cmv_basetable_2.a)
 WHERE cmv_basetable_2.c > 10.0
 GROUP BY cmv_basetable.a, cmv_basetable_2.c
-          rewrite enabled: true
+        rewrite enabled: true
 
   Stage: Stage-3
     Stats Work
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
index 6c3ba6cd96..15432c2b15 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
@@ -199,17 +199,16 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          columns: a int, c decimal(10,2), _c2 bigint
-          table properties:
-            transactional true
-          expanded text: SELECT `cmv_basetable_n5`.`a`, `cmv_basetable_2_n2`.`c`, sum(`cmv_basetable_2_n2`.`d`)
+      Create View
+        columns: a int, c decimal(10,2), _c2 bigint
+        table properties:
+          transactional true
+        expanded text: SELECT `cmv_basetable_n5`.`a`, `cmv_basetable_2_n2`.`c`, sum(`cmv_basetable_2_n2`.`d`)
 FROM `default`.`cmv_basetable_n5` JOIN `default`.`cmv_basetable_2_n2` ON (`cmv_basetable_n5`.`a` = `cmv_basetable_2_n2`.`a`)
 WHERE `cmv_basetable_2_n2`.`c` > 10.0
 GROUP BY `cmv_basetable_n5`.`a`, `cmv_basetable_2_n2`.`c`
-          name: default.cmv_mat_view_n5
-          original text: SELECT cmv_basetable_n5.a, cmv_basetable_2_n2.c, sum(cmv_basetable_2_n2.d)
+        name: default.cmv_mat_view_n5
+        original text: SELECT cmv_basetable_n5.a, cmv_basetable_2_n2.c, sum(cmv_basetable_2_n2.d)
 FROM cmv_basetable_n5 JOIN cmv_basetable_2_n2 ON (cmv_basetable_n5.a = cmv_basetable_2_n2.a)
 WHERE cmv_basetable_2_n2.c > 10.0
 GROUP BY cmv_basetable_n5.a, cmv_basetable_2_n2.c
@@ -470,10 +469,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Materialized View Operator:
-        Alter Materialized View
-          name: default.cmv_mat_view_n5
-          operation: UPDATE_REWRITE_FLAG
+      Alter Materialized View
+        name: default.cmv_mat_view_n5
+        operation: UPDATE_REWRITE_FLAG
 
 PREHOOK: query: ALTER MATERIALIZED VIEW cmv_mat_view_n5 ENABLE REWRITE
 PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
index 734ae685ef..37d80401f2 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
@@ -275,10 +275,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Materialized View Operator:
-        Alter Materialized View
-          name: default.cmv_mat_view2
-          operation: UPDATE_REWRITE_FLAG
+      Alter Materialized View
+        name: default.cmv_mat_view2
+        operation: UPDATE_REWRITE_FLAG
 
 PREHOOK: query: alter materialized view cmv_mat_view2 enable rewrite
 PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out
index b55287deb0..1697d4da5b 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_rebuild_dummy.q.out
@@ -205,19 +205,18 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          columns: a int, c decimal(10,2)
-          expanded text: SELECT `cmv_basetable_n1`.`a`, `cmv_basetable_2_n0`.`c`
+      Create View
+        columns: a int, c decimal(10,2)
+        expanded text: SELECT `cmv_basetable_n1`.`a`, `cmv_basetable_2_n0`.`c`
 FROM `default`.`cmv_basetable_n1` JOIN `default`.`cmv_basetable_2_n0` ON (`cmv_basetable_n1`.`a` = `cmv_basetable_2_n0`.`a`)
 WHERE `cmv_basetable_2_n0`.`c` > 10.0
 GROUP BY `cmv_basetable_n1`.`a`, `cmv_basetable_2_n0`.`c`
-          name: default.cmv_mat_view_n1
-          original text: SELECT cmv_basetable_n1.a, cmv_basetable_2_n0.c
+        name: default.cmv_mat_view_n1
+        original text: SELECT cmv_basetable_n1.a, cmv_basetable_2_n0.c
 FROM cmv_basetable_n1 JOIN cmv_basetable_2_n0 ON (cmv_basetable_n1.a = cmv_basetable_2_n0.a)
 WHERE cmv_basetable_2_n0.c > 10.0
 GROUP BY cmv_basetable_n1.a, cmv_basetable_2_n0.c
-          rewrite enabled: true
+        rewrite enabled: true
 
   Stage: Stage-3
     Stats Work
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
index cf6a6bace1..748883144b 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
@@ -205,17 +205,16 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          columns: a int, c decimal(10,2)
-          table properties:
-            rewriting.time.window 5min
-          expanded text: SELECT `cmv_basetable_n3`.`a`, `cmv_basetable_2_n1`.`c`
+      Create View
+        columns: a int, c decimal(10,2)
+        table properties:
+          rewriting.time.window 5min
+        expanded text: SELECT `cmv_basetable_n3`.`a`, `cmv_basetable_2_n1`.`c`
 FROM `default`.`cmv_basetable_n3` JOIN `default`.`cmv_basetable_2_n1` ON (`cmv_basetable_n3`.`a` = `cmv_basetable_2_n1`.`a`)
 WHERE `cmv_basetable_2_n1`.`c` > 10.0
 GROUP BY `cmv_basetable_n3`.`a`, `cmv_basetable_2_n1`.`c`
-          name: default.cmv_mat_view_n3
-          original text: SELECT cmv_basetable_n3.a, cmv_basetable_2_n1.c
+        name: default.cmv_mat_view_n3
+        original text: SELECT cmv_basetable_n3.a, cmv_basetable_2_n1.c
 FROM cmv_basetable_n3 JOIN cmv_basetable_2_n1 ON (cmv_basetable_n3.a = cmv_basetable_2_n1.a)
 WHERE cmv_basetable_2_n1.c > 10.0
 GROUP BY cmv_basetable_n3.a, cmv_basetable_2_n1.c
@@ -470,10 +469,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Materialized View Operator:
-        Alter Materialized View
-          name: default.cmv_mat_view_n3
-          operation: UPDATE_REWRITE_FLAG
+      Alter Materialized View
+        name: default.cmv_mat_view_n3
+        operation: UPDATE_REWRITE_FLAG
 
 PREHOOK: query: ALTER MATERIALIZED VIEW cmv_mat_view_n3 ENABLE REWRITE
 PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
index 19f014b57e..1cf8cc7829 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
@@ -108,14 +108,13 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          partition columns: key string
-          columns: value string
-          expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_1`
-          name: default.partition_mv_1
-          original text: SELECT value, key FROM src_txn where key > 200 and key < 250
-          rewrite enabled: true
+      Create View
+        partition columns: key string
+        columns: value string
+        expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_1`
+        name: default.partition_mv_1
+        original text: SELECT value, key FROM src_txn where key > 200 and key < 250
+        rewrite enabled: true
 
   Stage: Stage-0
     Move Operator
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out
index 0153822e7f..784c26aee5 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned_3.q.out
@@ -121,14 +121,13 @@ STAGE PLANS:
     Dependency Collection
 
   Stage: Stage-4
-      Create View Operator:
-        Create View
-          partition columns: key string
-          columns: value string
-          expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_sdp`
-          name: default.partition_mv_sdp
-          original text: SELECT value, key FROM src_txn where key > 200 and key < 250
-          rewrite enabled: true
+      Create View
+        partition columns: key string
+        columns: value string
+        expanded text: SELECT `value`, `key` FROM (SELECT `src_txn`.`value`, `src_txn`.`key` FROM `default`.`src_txn` where `src_txn`.`key` > 200 and `src_txn`.`key` < 250) `partition_mv_sdp`
+        name: default.partition_mv_sdp
+        original text: SELECT value, key FROM src_txn where key > 200 and key < 250
+        rewrite enabled: true
 
   Stage: Stage-0
     Move Operator
diff --git a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
index e0d42892f8..756fea9ab8 100644
--- a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
@@ -1372,12 +1372,11 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: key string, value string
-          expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2
-          name: default.sdi
-          original text: select distinct * from src order by key limit 2
+      Create View
+        columns: key string, value string
+        expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2
+        name: default.sdi
+        original text: select distinct * from src order by key limit 2
 
 PREHOOK: query: create view sdi as select distinct * from src order by key limit 2
 PREHOOK: type: CREATEVIEW
@@ -3858,12 +3857,11 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: key string, value string
-          expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2
-          name: default.sdi
-          original text: select distinct * from src order by key limit 2
+      Create View
+        columns: key string, value string
+        expanded text: select distinct `src`.`key`, `src`.`value` from `default`.`src` order by `src`.`key` limit 2
+        name: default.sdi
+        original text: select distinct * from src order by key limit 2
 
 PREHOOK: query: create view sdi as select distinct * from src order by key limit 2
 PREHOOK: type: CREATEVIEW
diff --git a/ql/src/test/results/clientpositive/llap/union_top_level.q.out b/ql/src/test/results/clientpositive/llap/union_top_level.q.out
index 574ce2ba45..23e676b759 100644
--- a/ql/src/test/results/clientpositive/llap/union_top_level.q.out
+++ b/ql/src/test/results/clientpositive/llap/union_top_level.q.out
@@ -1306,16 +1306,15 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: key string, value int
-          expanded text: select `a`.`key`, `a`.`value` from (select `src`.`key`, 0 as `value` from `default`.`src` where `src`.`key` % 3 == 0 limit 3)`a`
+      Create View
+        columns: key string, value int
+        expanded text: select `a`.`key`, `a`.`value` from (select `src`.`key`, 0 as `value` from `default`.`src` where `src`.`key` % 3 == 0 limit 3)`a`
 union all
 select `b`.`key`, `b`.`value` from (select `src`.`key`, 1 as `value` from `default`.`src` where `src`.`key` % 3 == 1 limit 3)`b`
 union all
 select `c`.`key`, `c`.`value` from (select `src`.`key`, 2 as `value` from `default`.`src` where `src`.`key` % 3 == 2 limit 3)`c`
-          name: default.union_top_view
-          original text: select * from (select key, 0 as value from src where key % 3 == 0 limit 3)a
+        name: default.union_top_view
+        original text: select * from (select key, 0 as value from src where key % 3 == 0 limit 3)a
 union all
 select * from (select key, 1 as value from src where key % 3 == 1 limit 3)b
 union all
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out
index 8774c04347..fc2e27ff4d 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing.q.out
@@ -4691,16 +4691,15 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          if not exists: true
-          columns: p_mfgr string, p_brand string, s double
-          expanded text: select `part`.`p_mfgr`, `part`.`p_brand`,
+      Create View
+        if not exists: true
+        columns: p_mfgr string, p_brand string, s double
+        expanded text: select `part`.`p_mfgr`, `part`.`p_brand`,
 round(sum(`part`.`p_retailprice`),2) as `s`
 from `default`.`part`
 group by `part`.`p_mfgr`, `part`.`p_brand`
-          name: default.mfgr_price_view_n2
-          original text: select p_mfgr, p_brand,
+        name: default.mfgr_price_view_n2
+        original text: select p_mfgr, p_brand,
 round(sum(p_retailprice),2) as s
 from part
 group by p_mfgr, p_brand
@@ -5022,16 +5021,15 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          if not exists: true
-          columns: p_mfgr string, p_brand string, s double
-          expanded text: select `part`.`p_mfgr`, `part`.`p_brand`,
+      Create View
+        if not exists: true
+        columns: p_mfgr string, p_brand string, s double
+        expanded text: select `part`.`p_mfgr`, `part`.`p_brand`,
 round(sum(`part`.`p_retailprice`) over w1,2) as `s`
 from `default`.`part`
 window w1 as (distribute by `part`.`p_mfgr` sort by `part`.`p_name` rows between 2 preceding and current row)
-          name: default.mfgr_brand_price_view_n0
-          original text: select p_mfgr, p_brand,
+        name: default.mfgr_brand_price_view_n0
+        original text: select p_mfgr, p_brand,
 round(sum(p_retailprice) over w1,2) as s
 from part
 window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and current row)
diff --git a/ql/src/test/results/clientpositive/masking_mv.q.out b/ql/src/test/results/clientpositive/masking_mv.q.out
index afb0ecb672..5d84ddd32d 100644
--- a/ql/src/test/results/clientpositive/masking_mv.q.out
+++ b/ql/src/test/results/clientpositive/masking_mv.q.out
@@ -101,13 +101,12 @@ STAGE PLANS:
 #### A masked pattern was here ####
 
   Stage: Stage-8
-      Create View Operator:
-        Create View
-          columns: key int
-          expanded text: select `masking_test_n_mv`.`key` from `default`.`masking_test_n_mv`
-          name: default.masking_test_view_n_mv
-          original text: select key from masking_test_n_mv
-          rewrite enabled: true
+      Create View
+        columns: key int
+        expanded text: select `masking_test_n_mv`.`key` from `default`.`masking_test_n_mv`
+        name: default.masking_test_view_n_mv
+        original text: select key from masking_test_n_mv
+        rewrite enabled: true
 
   Stage: Stage-2
     Stats Work
@@ -774,13 +773,12 @@ STAGE PLANS:
 #### A masked pattern was here ####
 
   Stage: Stage-8
-      Create View Operator:
-        Create View
-          columns: key int
-          expanded text: select `srctnx`.`key` from `default`.`srcTnx`
-          name: default.masking_test_view_n_mv_2
-          original text: select key from srcTnx
-          rewrite enabled: true
+      Create View
+        columns: key int
+        expanded text: select `srctnx`.`key` from `default`.`srcTnx`
+        name: default.masking_test_view_n_mv_2
+        original text: select key from srcTnx
+        rewrite enabled: true
 
   Stage: Stage-2
     Stats Work
diff --git a/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out b/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
index b8403f4e39..f511ae5e3f 100644
--- a/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
@@ -5303,11 +5303,7 @@ POSTHOOK: Output: default@mfgr_price_view_n1
 Plan optimized by CBO.
 
 Stage-1
-  Create View Operator:
-    name:default.mfgr_price_view_n1,original text:select p_mfgr, p_brand,
-sum(p_retailprice) as s
-from part
-group by p_mfgr, p_brand
+  Create View{"name:":"default.mfgr_price_view_n1","original text:":"select p_mfgr, p_brand,\nsum(p_retailprice) as s\nfrom part\ngroup by p_mfgr, p_brand"}
 
 PREHOOK: query: CREATE TABLE part_4_n0(
 p_mfgr STRING,
diff --git a/ql/src/test/results/clientpositive/spark/union_top_level.q.out b/ql/src/test/results/clientpositive/spark/union_top_level.q.out
index 06f1bae0bd..c2314e8384 100644
--- a/ql/src/test/results/clientpositive/spark/union_top_level.q.out
+++ b/ql/src/test/results/clientpositive/spark/union_top_level.q.out
@@ -1045,16 +1045,15 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-      Create View Operator:
-        Create View
-          columns: key string, value int
-          expanded text: select `a`.`key`, `a`.`value` from (select `src`.`key`, 0 as `value` from `default`.`src` where `src`.`key` % 3 == 0 limit 3)`a`
+      Create View
+        columns: key string, value int
+        expanded text: select `a`.`key`, `a`.`value` from (select `src`.`key`, 0 as `value` from `default`.`src` where `src`.`key` % 3 == 0 limit 3)`a`
 union all
 select `b`.`key`, `b`.`value` from (select `src`.`key`, 1 as `value` from `default`.`src` where `src`.`key` % 3 == 1 limit 3)`b`
 union all
 select `c`.`key`, `c`.`value` from (select `src`.`key`, 2 as `value` from `default`.`src` where `src`.`key` % 3 == 2 limit 3)`c`
-          name: default.union_top_view
-          original text: select * from (select key, 0 as value from src where key % 3 == 0 limit 3)a
+        name: default.union_top_view
+        original text: select * from (select key, 0 as value from src where key % 3 == 0 limit 3)a
 union all
 select * from (select key, 1 as value from src where key % 3 == 1 limit 3)b
 union all
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index eca2bf9e09..235f8c9d42 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -662,9 +662,7 @@ POSTHOOK: Output: default@v_n5
 Plan optimized by CBO.
 
 Stage-1
-  Create View Operator:
-    name:default.v_n5,original text:with cte as (select * from src order by key limit 5)
-select * from cte
+  Create View{"name:":"default.v_n5","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}
 
 PREHOOK: query: with cte as (select * from src order by key limit 5)
 select * from cte
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 3cb41ad1f7..40d1c32153 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -528,9 +528,7 @@ POSTHOOK: Output: default@v_n1
 Plan optimized by CBO.
 
 Stage-1
-  Create View Operator:
-    name:default.v_n1,original text:with cte as (select * from src order by key limit 5)
-select * from cte
+  Create View{"name:":"default.v_n1","original text:":"with cte as (select * from src order by key limit 5)\nselect * from cte"}
 
 PREHOOK: query: explain with cte as (select * from src order by key limit 5)
 select * from cte