diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java new file mode 100644 index 0000000000..04075ec3bb --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewDesc.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.alter; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for all the ALTER MATERIALIZED VIEW commands. + */ +@Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +abstract class AlterMaterializedViewDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + /** + * Alter Materialized View Types. + */ + public static enum AlterMaterializedViewTypes { + UPDATE_REWRITE_FLAG + }; + + private AlterMaterializedViewTypes op; + + public AlterMaterializedViewDesc(AlterMaterializedViewTypes type) { + this.op = type; + } + + @Explain(displayName = "operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getOpString() { + return op.toString(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java new file mode 100644 index 0000000000..42dddbab43 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteDesc.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.alter; + +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for the ALTER MATERIALIZED VIEW ... ENABLE/DISABLE REWRITE commands. + */ +@Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class AlterMaterializedViewRewriteDesc extends AlterMaterializedViewDesc { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(AlterMaterializedViewRewriteDesc.class, AlterMaterializedViewRewriteOperation.class); + } + + private final String fqMaterializedViewName; + private final boolean rewriteEnable; + + public AlterMaterializedViewRewriteDesc(String fqMaterializedViewName, boolean rewriteEnable) { + super(AlterMaterializedViewTypes.UPDATE_REWRITE_FLAG); + this.fqMaterializedViewName = fqMaterializedViewName; + this.rewriteEnable = rewriteEnable; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getMaterializedViewName() { + return fqMaterializedViewName; + } + + public boolean isRewriteEnable() { + return rewriteEnable; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java new file mode 100644 index 0000000000..1c1779ea59 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/alter/AlterMaterializedViewRewriteOperation.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.alter; + +import org.apache.calcite.rel.RelNode; +import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.metastore.api.EnvironmentContext; +import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.CalcitePlanner; +import org.apache.hadoop.hive.ql.parse.ParseUtils; + +/** + * Operation process of enabling/disabling materialized view rewrite.
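Reviewer note: the `static` initializer in AlterMaterializedViewRewriteDesc above hands the descriptor to `DDLTask2`, which is not part of this diff. A minimal sketch of the registry idiom that call implies, with class and method names assumed from the call sites in this patch (the real `DDLTask2` may differ):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical registry sketch only; DDLTask2 itself is introduced outside this patch.
public final class DDLOperationRegistry {
  // Maps each DDLDesc subclass to the DDLOperation subclass that executes it.
  private static final Map<Class<?>, Class<?>> OPERATIONS = new ConcurrentHashMap<>();

  private DDLOperationRegistry() {
  }

  public static void registerOperation(Class<?> descClass, Class<?> operationClass) {
    OPERATIONS.put(descClass, operationClass);
  }

  public static Class<?> getOperation(Class<?> descClass) {
    return OPERATIONS.get(descClass);
  }
}
```

A concurrent map is the natural choice here because the registrations run from static initializers of unrelated classes, whose loading order and loading threads are not coordinated.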
+ */ +public class AlterMaterializedViewRewriteOperation extends DDLOperation { + private final AlterMaterializedViewRewriteDesc desc; + + public AlterMaterializedViewRewriteOperation(DDLOperationContext context, AlterMaterializedViewRewriteDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Table mv = context.getDb().getTable(desc.getMaterializedViewName()); + if (mv.isRewriteEnabled() == desc.isRewriteEnable()) { + // This is a noop, return successfully + return 0; + } + + if (desc.isRewriteEnable()) { + try { + QueryState qs = new QueryState.Builder().withHiveConf(context.getConf()).build(); + CalcitePlanner planner = new CalcitePlanner(qs); + Context ctx = new Context(context.getConf()); + ctx.setIsLoadingMaterializedView(true); + planner.initCtx(ctx); + planner.init(false); + + RelNode plan = planner.genLogicalPlan(ParseUtils.parse(mv.getViewExpandedText())); + if (plan == null) { + String msg = "Cannot enable automatic rewriting for materialized view."; + if (ctx.getCboInfo() != null) { + msg += " " + ctx.getCboInfo(); + } + throw new HiveException(msg); + } + if (!planner.isValidAutomaticRewritingMaterialization()) { + throw new HiveException("Cannot enable rewriting for materialized view. " + + planner.getInvalidAutomaticRewritingMaterializationReason()); + } + } catch (Exception e) { + throw new HiveException(e); + } + } + + Table newMV = mv.copy(); // Do not mess with Table instance + newMV.setRewriteEnabled(desc.isRewriteEnable()); + EnvironmentContext environmentContext = new EnvironmentContext(); + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); + context.getDb().alterTable(newMV, false, environmentContext, true); + + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java similarity index 89% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java index b693fdb845..cfebf3dc74 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.table; import java.io.Serializable; import java.util.List; @@ -28,38 +28,44 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.exec.DDLTask; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; +import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - /** - * CreateViewDesc. - * + * DDL task description for CREATE VIEW commands. 
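Reviewer note: together, AlterMaterializedViewRewriteDesc and AlterMaterializedViewRewriteOperation above implement `ALTER MATERIALIZED VIEW <name> ENABLE REWRITE` / `DISABLE REWRITE`. A sketch of exercising the new pair directly, assuming a valid `DDLOperationContext` (its construction is not shown in this patch):

```java
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteDesc;
import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteOperation;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class RewriteFlagUsageSketch {
  // Illustration only; 'context' is assumed to be a fully initialized DDLOperationContext.
  static int enableRewrite(DDLOperationContext context) throws HiveException {
    AlterMaterializedViewRewriteDesc desc =
        new AlterMaterializedViewRewriteDesc("default.mv1", true /* rewriteEnable */);
    AlterMaterializedViewRewriteOperation operation =
        new AlterMaterializedViewRewriteOperation(context, desc);
    // Returns 0 on success; also 0 when the flag already has the requested value (no-op fast path).
    return operation.execute();
  }
}
```

Note the asymmetry in execute(): enabling the flag re-plans the view's expanded text through CalcitePlanner and validates the result, while disabling skips straight to the metastore update.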
*/ @Explain(displayName = "Create View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class CreateViewDesc extends DDLDesc implements Serializable { +public class CreateViewDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - private static Logger LOG = LoggerFactory.getLogger(CreateViewDesc.class); + private static final Logger LOG = LoggerFactory.getLogger(CreateViewDesc.class); + + static { + DDLTask2.registerOperation(CreateViewDesc.class, CreateViewOperation.class); + } private String viewName; - private String originalText; - private String expandedText; - private boolean rewriteEnabled; private List schema; + private String comment; private Map tblProps; private List partColNames; - private List partCols; - private String comment; private boolean ifNotExists; - private boolean replace; + private boolean orReplace; + + private String originalText; + private String expandedText; + private boolean rewriteEnabled; + private List partCols; private boolean isAlterViewAs; private boolean isMaterialized; private String inputFormat; @@ -72,41 +78,21 @@ private ReplicationSpec replicationSpec = null; private String ownerName = null; - /** - * For serialization only. - */ - public CreateViewDesc() { - } - /** * Used to create a materialized view descriptor - * @param viewName - * @param schema - * @param comment - * @param tblProps - * @param partColNames - * @param ifNotExists - * @param replace - * @param isAlterViewAs - * @param inputFormat - * @param outputFormat - * @param location - * @param serde - * @param storageHandler - * @param serdeProps */ - public CreateViewDesc(String viewName, List schema, String comment, - Map tblProps, List partColNames, - boolean ifNotExists, boolean replace, boolean rewriteEnabled, boolean isAlterViewAs, + public CreateViewDesc(String viewName, List schema, String comment, Map tblProps, + List partColNames, boolean ifNotExists, boolean orReplace, boolean rewriteEnabled, boolean isAlterViewAs, String inputFormat, String outputFormat, String location, String serde, String storageHandler, Map serdeProps) { this.viewName = viewName; this.schema = schema; + this.comment = comment; this.tblProps = tblProps; this.partColNames = partColNames; - this.comment = comment; this.ifNotExists = ifNotExists; - this.replace = replace; + this.orReplace = orReplace; + this.isMaterialized = true; this.rewriteEnabled = rewriteEnabled; this.isAlterViewAs = isAlterViewAs; @@ -120,29 +106,19 @@ public CreateViewDesc(String viewName, List schema, String comment, /** * Used to create a view descriptor - * @param viewName - * @param schema - * @param comment - * @param tblProps - * @param partColNames - * @param ifNotExists - * @param orReplace - * @param isAlterViewAs - * @param inputFormat - * @param outputFormat - * @param serde */ - public CreateViewDesc(String viewName, List schema, String comment, - Map tblProps, List partColNames, - boolean ifNotExists, boolean orReplace, boolean isAlterViewAs, + public CreateViewDesc(String viewName, List schema, String comment, Map tblProps, + List partColNames, boolean ifNotExists, boolean orReplace, + boolean isAlterViewAs, String inputFormat, String outputFormat, String serde) { this.viewName = viewName; this.schema = schema; + this.comment = comment; this.tblProps = tblProps; this.partColNames = partColNames; - this.comment = comment; this.ifNotExists = ifNotExists; - this.replace = orReplace; + this.orReplace = orReplace; + this.isAlterViewAs = isAlterViewAs; 
this.isMaterialized = false; this.rewriteEnabled = false; @@ -258,11 +234,11 @@ public void setTablesUsed(Set<String> tablesUsed) { @Explain(displayName = "replace", displayOnlyOnTrue = true) public boolean isReplace() { - return replace; + return orReplace; } public void setReplace(boolean replace) { - this.replace = replace; + this.orReplace = replace; } @Explain(displayName = "is alter view as select", displayOnlyOnTrue = true) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java new file mode 100644 index 0000000000..bb9b18977b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/CreateViewOperation.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.ValidTxnWriteIdList; +import org.apache.hadoop.hive.metastore.api.CreationMetadata; +import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; + +import com.google.common.collect.ImmutableSet; + +/** + * Operation process of creating a view. + */ +public class CreateViewOperation extends DDLOperation { + private final CreateViewDesc desc; + + public CreateViewOperation(DDLOperationContext context, CreateViewDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Table oldview = context.getDb().getTable(desc.getViewName(), false); + if (oldview != null) { + // Check whether we are replicating + if (desc.getReplicationSpec().isInReplicationScope()) { + // if this is a replication spec, then replace-mode semantics might apply. + if (desc.getReplicationSpec().allowEventReplacementInto(oldview.getParameters())) { + desc.setReplace(true); // we replace existing view. + } else { + LOG.debug("Create View is skipped as view {} is newer than the update", + desc.getViewName()); // no replacement, the existing table state is newer than our update.
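Reviewer note on the CreateViewDesc changes above: the parameter order changed (`comment` now precedes `tblProps`) and `replace` was renamed to `orReplace`, so out-of-tree callers need care. A sketch of calls against the two reworked constructors; all values are placeholders, and the generic element types (`FieldSchema`, `String`) are taken from the original class:

```java
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc;

public class CreateViewDescUsageSketch {
  // Materialized view flavor: rewriteEnabled sits between orReplace and isAlterViewAs.
  static CreateViewDesc materializedView(List<FieldSchema> schema, Map<String, String> tblProps) {
    return new CreateViewDesc("db.mv1", schema, "a comment", tblProps,
        Collections.emptyList() /* partColNames */, false /* ifNotExists */, false /* orReplace */,
        true /* rewriteEnabled */, false /* isAlterViewAs */,
        null /* inputFormat */, null /* outputFormat */, null /* location */,
        null /* serde */, null /* storageHandler */, null /* serdeProps */);
  }

  // Plain view flavor: no rewrite flag, location or storage handler.
  static CreateViewDesc plainView(List<FieldSchema> schema, Map<String, String> tblProps) {
    return new CreateViewDesc("db.v1", schema, "a comment", tblProps,
        Collections.emptyList() /* partColNames */, false /* ifNotExists */, true /* orReplace */,
        false /* isAlterViewAs */, null /* inputFormat */, null /* outputFormat */, null /* serde */);
  }
}
```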
+ return 0; + } + } + + if (!desc.isReplace() && !desc.getIfNotExists()) { + // View already exists, thus we should be replacing + throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(desc.getViewName())); + } + + // It should not be a materialized view + assert !desc.isMaterialized(); + + // replace existing view + // remove the existing partition columns from the field schema + oldview.setViewOriginalText(desc.getViewOriginalText()); + oldview.setViewExpandedText(desc.getViewExpandedText()); + oldview.setFields(desc.getSchema()); + if (desc.getComment() != null) { + oldview.setProperty("comment", desc.getComment()); + } + if (desc.getTblProps() != null) { + oldview.getTTable().getParameters().putAll(desc.getTblProps()); + } + oldview.setPartCols(desc.getPartCols()); + if (desc.getInputFormat() != null) { + oldview.setInputFormatClass(desc.getInputFormat()); + } + if (desc.getOutputFormat() != null) { + oldview.setOutputFormatClass(desc.getOutputFormat()); + } + oldview.checkValidity(null); + if (desc.getOwnerName() != null) { + oldview.setOwner(desc.getOwnerName()); + } + context.getDb().alterTable(desc.getViewName(), oldview, false, null, true); + DDLUtils.addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK), + context.getWork().getOutputs()); + } else { + // We create new view + Table tbl = desc.toTable(context.getConf()); + // We set the signature for the view if it is a materialized view + if (tbl.isMaterializedView()) { + CreationMetadata cm = + new CreationMetadata(MetaStoreUtils.getDefaultCatalog(context.getConf()), tbl.getDbName(), + tbl.getTableName(), ImmutableSet.copyOf(desc.getTablesUsed())); + cm.setValidTxnList(context.getConf().get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY)); + tbl.getTTable().setCreationMetadata(cm); + } + context.getDb().createTable(tbl, desc.getIfNotExists()); + DDLUtils.addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK), + context.getWork().getOutputs()); + + //set lineage info + DataContainer dc = new DataContainer(tbl.getTTable()); + context.getQueryState().getLineageState().setLineage(new Path(desc.getViewName()), dc, tbl.getCols()); + } + return 0; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index f4281bdd7b..ed797fc5dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -42,11 +42,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; -import org.apache.calcite.rel.RelNode; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -54,7 +52,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.StatsSetupConst; -import org.apache.hadoop.hive.common.ValidTxnWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.DefaultHiveMetaHook; @@ -66,7 +63,6 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.CompactionResponse; -import org.apache.hadoop.hive.metastore.api.CreationMetadata; import 
org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -92,7 +88,6 @@ import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.txn.TxnStore; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; @@ -104,7 +99,6 @@ import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager; import org.apache.hadoop.hive.ql.exec.tez.TezTask; import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager; -import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.AcidUtils; @@ -131,16 +125,13 @@ import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; -import org.apache.hadoop.hive.ql.parse.CalcitePlanner; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; -import org.apache.hadoop.hive.ql.parse.ParseUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.dump.Utils; import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; @@ -153,7 +144,6 @@ import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc; import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc; import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; @@ -308,11 +298,6 @@ public int execute(DriverContext driverContext) { } } - CreateViewDesc crtView = work.getCreateViewDesc(); - if (crtView != null) { - return createView(db, crtView); - } - AddPartitionDesc addPartitionDesc = work.getAddPartitionDesc(); if (addPartitionDesc != null) { return addPartitions(db, addPartitionDesc); @@ -493,10 +478,6 @@ public int execute(DriverContext driverContext) { return createOrDropTriggerToPoolMapping(db, work.getTriggerToPoolMappingDesc()); } - if (work.getAlterMaterializedViewDesc() != null) { - return alterMaterializedView(db, work.getAlterMaterializedViewDesc()); - } - if (work.getReplSetFirstIncLoadFlagDesc() != null) { return remFirstIncPendFlag(db, work.getReplSetFirstIncLoadFlagDesc()); } @@ -999,68 +980,6 @@ private void writeListToFileAfterSort(List entries, String resFile) thro writeToFile(sb.toString(), resFile); } - /** - * Alters a materialized view. - * - * @param db - * Database that the materialized view belongs to. - * @param alterMVDesc - * Descriptor of the changes. - * @return Returns 0 when execution succeeds and above 0 if it fails. 
- * @throws HiveException - * @throws InvalidOperationException - */ - private int alterMaterializedView(Hive db, AlterMaterializedViewDesc alterMVDesc) throws HiveException { - String mvName = alterMVDesc.getMaterializedViewName(); - // It can be fully qualified name or use default database - Table oldMV = db.getTable(mvName); - Table mv = oldMV.copy(); // Do not mess with Table instance - EnvironmentContext environmentContext = new EnvironmentContext(); - environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); - - switch (alterMVDesc.getOp()) { - case UPDATE_REWRITE_FLAG: - if (mv.isRewriteEnabled() == alterMVDesc.isRewriteEnable()) { - // This is a noop, return successfully - return 0; - } - if (alterMVDesc.isRewriteEnable()) { - try { - final QueryState qs = - new QueryState.Builder().withHiveConf(conf).build(); - final CalcitePlanner planner = new CalcitePlanner(qs); - final Context ctx = new Context(conf); - ctx.setIsLoadingMaterializedView(true); - planner.initCtx(ctx); - planner.init(false); - final RelNode plan = planner.genLogicalPlan(ParseUtils.parse(mv.getViewExpandedText())); - if (plan == null) { - String msg = "Cannot enable automatic rewriting for materialized view."; - if (ctx.getCboInfo() != null) { - msg += " " + ctx.getCboInfo(); - } - throw new HiveException(msg); - } - if (!planner.isValidAutomaticRewritingMaterialization()) { - throw new HiveException("Cannot enable rewriting for materialized view. " + - planner.getInvalidAutomaticRewritingMaterializationReason()); - } - } catch (Exception e) { - throw new HiveException(e); - } - } - mv.setRewriteEnabled(alterMVDesc.isRewriteEnable()); - break; - - default: - throw new AssertionError("Unsupported alter materialized view type! : " + alterMVDesc.getOp()); - } - - db.alterTable(mv, false, environmentContext, true); - - return 0; - } - /** * Add a partitions to a table. * @@ -3424,85 +3343,6 @@ public static void validateSerDe(String serdeName, HiveConf conf) throws HiveExc } } - /** - * Create a new view. - * - * @param db - * The database in question. - * @param crtView - * This is the view we're creating. - * @return Returns 0 when execution succeeds and above 0 if it fails. - * @throws HiveException - * Throws this exception if an unexpected error occurs. - */ - private int createView(Hive db, CreateViewDesc crtView) throws HiveException { - Table oldview = db.getTable(crtView.getViewName(), false); - if (oldview != null) { - // Check whether we are replicating - if (crtView.getReplicationSpec().isInReplicationScope()) { - // if this is a replication spec, then replace-mode semantics might apply. - if (crtView.getReplicationSpec().allowEventReplacementInto(oldview.getParameters())){ - crtView.setReplace(true); // we replace existing view. - } else { - LOG.debug("DDLTask: Create View is skipped as view {} is newer than update", - crtView.getViewName()); // no replacement, the existing table state is newer than our update. 
- return 0; - } - } - - if (!crtView.isReplace() && !crtView.getIfNotExists()) { - // View already exists, thus we should be replacing - throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(crtView.getViewName())); - } - - // It should not be a materialized view - assert !crtView.isMaterialized(); - - // replace existing view - // remove the existing partition columns from the field schema - oldview.setViewOriginalText(crtView.getViewOriginalText()); - oldview.setViewExpandedText(crtView.getViewExpandedText()); - oldview.setFields(crtView.getSchema()); - if (crtView.getComment() != null) { - oldview.setProperty("comment", crtView.getComment()); - } - if (crtView.getTblProps() != null) { - oldview.getTTable().getParameters().putAll(crtView.getTblProps()); - } - oldview.setPartCols(crtView.getPartCols()); - if (crtView.getInputFormat() != null) { - oldview.setInputFormatClass(crtView.getInputFormat()); - } - if (crtView.getOutputFormat() != null) { - oldview.setOutputFormatClass(crtView.getOutputFormat()); - } - oldview.checkValidity(null); - if (crtView.getOwnerName() != null) { - oldview.setOwner(crtView.getOwnerName()); - } - db.alterTable(crtView.getViewName(), oldview, false, null, true); - addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK)); - } else { - // We create new view - Table tbl = crtView.toTable(conf); - // We set the signature for the view if it is a materialized view - if (tbl.isMaterializedView()) { - CreationMetadata cm = - new CreationMetadata(MetaStoreUtils.getDefaultCatalog(conf), tbl.getDbName(), - tbl.getTableName(), ImmutableSet.copyOf(crtView.getTablesUsed())); - cm.setValidTxnList(conf.get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY)); - tbl.getTTable().setCreationMetadata(cm); - } - db.createTable(tbl, crtView.getIfNotExists()); - addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK)); - - //set lineage info - DataContainer dc = new DataContainer(tbl.getTTable()); - queryState.getLineageState().setLineage(new Path(crtView.getViewName()), dc, tbl.getCols()); - } - return 0; - } - private int exchangeTablePartition(Hive db, AlterTableExchangePartition exchangePartition) throws HiveException { Map partitionSpecs = exchangePartition.getPartitionSpecs(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index baf635633d..9741a66202 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -71,6 +71,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteDesc; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc; @@ -120,8 +121,6 @@ import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; -import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc.AlterMaterializedViewTypes; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc; import 
org.apache.hadoop.hive.ql.plan.AlterTableDesc; @@ -4398,10 +4397,7 @@ private void analyzeAlterMaterializedViewRewrite(String fqMvName, ASTNode ast) t throw new SemanticException("Invalid alter materialized view expression"); } - AlterMaterializedViewDesc alterMVDesc = - new AlterMaterializedViewDesc(AlterMaterializedViewTypes.UPDATE_REWRITE_FLAG); - alterMVDesc.setFqMaterializedViewName(fqMvName); - alterMVDesc.setRewriteEnableFlag(enableFlag); + AlterMaterializedViewRewriteDesc alterMVRewriteDesc = new AlterMaterializedViewRewriteDesc(fqMvName, enableFlag); // It can be fully qualified name or use default database Table materializedViewTable = getTable(fqMvName, true); @@ -4418,14 +4414,9 @@ private void analyzeAlterMaterializedViewRewrite(String fqMvName, ASTNode ast) t } } - if (AcidUtils.isTransactionalTable(materializedViewTable)) { - setAcidDdlDesc(alterMVDesc); - } - inputs.add(new ReadEntity(materializedViewTable)); outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterMVDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterMVRewriteDesc))); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java index 77e181863e..c4e6e5cc53 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.QueryProperties; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -44,7 +45,6 @@ import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc.SampleDesc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java index 0405ee8f02..4c1e2a27cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java @@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; /** * Implementation of the query block. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 1e79f325f8..b4b5ebd374 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -102,6 +102,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; import org.apache.hadoop.hive.ql.ddl.table.CreateTableLikeDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.ddl.table.PreInsertTableDesc; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; @@ -195,7 +196,6 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; @@ -13750,8 +13750,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt dbDotTable, cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - createVwDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createVwDesc))); addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 0b6ff524b1..67d27cdd87 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -32,9 +32,12 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.alter.AlterMaterializedViewRewriteDesc; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; -import org.apache.hadoop.hive.ql.exec.DDLTask; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.MaterializedViewDesc; @@ -61,8 +64,6 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec; import org.apache.hadoop.hive.ql.plan.BasicStatsWork; import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc; -import org.apache.hadoop.hive.ql.plan.CreateViewDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.LoadFileDesc; @@ -118,7 +119,7 @@ public void init(QueryState queryState, LogHelper console, Hive db) { this.console = console; } - @SuppressWarnings({"nls", "unchecked"}) + @SuppressWarnings("nls") public void compile(final ParseContext pCtx, final List> rootTasks, final HashSet inputs, final HashSet outputs) throws 
SemanticException { @@ -364,7 +365,7 @@ public void compile(final ParseContext pCtx, } else if (pCtx.getQueryProperties().isMaterializedView()) { // generate a DDL task and make it a dependent task of the leaf CreateViewDesc viewDesc = pCtx.getCreateViewDesc(); - Task crtViewTask = TaskFactory.get(new DDLWork( + Task crtViewTask = TaskFactory.get(new DDLWork2( inputs, outputs, viewDesc)); patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtViewTask, CollectionUtils.isEmpty(viewDesc.getPartColNames())); } else if (pCtx.getMaterializedViewUpdateDesc() != null) { @@ -544,28 +545,26 @@ private void patchUpAfterCTASorMaterializedView(final List dependentTask, Task loadFileWork, Map map, int outerQueryLimit, int numBitVector) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java deleted file mode 100644 index 865d1431d1..0000000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterMaterializedViewDesc.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - -/** - * AlterMaterializedViewDesc. - */ -@Explain(displayName = "Alter Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class AlterMaterializedViewDesc extends DDLDesc implements Serializable, DDLDescWithWriteId { - private static final long serialVersionUID = 1L; - private String fqMaterializedViewName; - private boolean rewriteEnable; - - /** - * alterMVTypes. 
- * - */ - public static enum AlterMaterializedViewTypes { - UPDATE_REWRITE_FLAG - }; - - AlterMaterializedViewTypes op; - private long writeId; - - public AlterMaterializedViewDesc() { - } - - public AlterMaterializedViewDesc(AlterMaterializedViewTypes type) { - this.op = type; - } - - /** - * @return the name of the materializedViewName - */ - @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getMaterializedViewName() { - return fqMaterializedViewName; - } - - /** - * @param materializedViewName - * the materializedViewName to set - */ - public void setFqMaterializedViewName(String materializedViewName) { - this.fqMaterializedViewName = materializedViewName; - } - - /** - * @return the rewrite flag - */ - public boolean isRewriteEnable() { - return rewriteEnable; - } - - /** - * @param rewriteEnable - * the value for the flag - */ - public void setRewriteEnableFlag(boolean rewriteEnable) { - this.rewriteEnable = rewriteEnable; - } - - /** - * @return the op - */ - @Explain(displayName = "operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getOpString() { - return op.toString(); - } - - /** - * @return the op - */ - public AlterMaterializedViewTypes getOp() { - return op; - } - - /** - * @param op - * the op to set - */ - public void setOp(AlterMaterializedViewTypes op) { - this.op = op; - } - - @Override - public void setWriteId(long writeId) { - this.writeId = writeId; - } - - @Override - public String getFullTableName() { - return fqMaterializedViewName; - } - - @Override - public boolean mayNeedWriteId() { - return true; // Verified when this is set as DDL Desc for ACID. - } - -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index 2b653a5d21..dac689dbed 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.plan; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; @@ -34,8 +35,6 @@ // TODO: this can probably be replaced with much less code via dynamic dispatch and/or templates. 
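Reviewer note: the TODO above is exactly what `DDLWork2` addresses. `DDLWork2` itself is not in this diff; judging from how it is constructed throughout the patch (inputs, outputs, one descriptor), its shape is presumably close to the following sketch (field and getter names are assumptions):

```java
import java.io.Serializable;
import java.util.HashSet;

import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Assumed sketch of DDLWork2: one generic descriptor instead of a field per command.
public class DDLWork2Sketch implements Serializable {
  private static final long serialVersionUID = 1L;

  private final HashSet<ReadEntity> inputs;
  private final HashSet<WriteEntity> outputs;
  private final DDLDesc ddlDesc;

  public DDLWork2Sketch(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, DDLDesc ddlDesc) {
    this.inputs = inputs;
    this.outputs = outputs;
    this.ddlDesc = ddlDesc;
  }

  public DDLDesc getDDLDesc() {
    return ddlDesc;
  }
}
```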
private InsertCommitHookDesc insertCommitHookDesc; - private AlterMaterializedViewDesc alterMVDesc; - private CreateViewDesc createVwDesc; private DropPartitionDesc dropPartitionDesc; private AlterTableDesc alterTblDesc; private ShowColumnsDesc showColumnsDesc; @@ -118,27 +117,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.alterTblDesc = alterTblDesc; } - /** - * @param alterMVDesc - * alter materialized view descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - AlterMaterializedViewDesc alterMVDesc) { - this(inputs, outputs); - this.alterMVDesc = alterMVDesc; - } - - /** - * @param createVwDesc - * create view descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - CreateViewDesc createVwDesc) { - this(inputs, outputs); - - this.createVwDesc = createVwDesc; - } - /** * @param dropTblDesc * drop table descriptor @@ -404,14 +382,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.replSetFirstIncLoadFlagDesc = replSetFirstIncLoadFlagDesc; } - /** - * @return the createTblDesc - */ - @Explain(displayName = "Create View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public CreateViewDesc getCreateViewDesc() { - return createVwDesc; - } - /** * @return the dropTblDesc */ @@ -428,15 +398,6 @@ public AlterTableDesc getAlterTblDesc() { return alterTblDesc; } - - /** - * @return the alterMVDesc - */ - @Explain(displayName = "Alter Materialized View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public AlterMaterializedViewDesc getAlterMaterializedViewDesc() { - return alterMVDesc; - } - /** * @return the showColumnsDesc */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 381c3b54a5..8db5d33868 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -327,7 +328,7 @@ public String getDatabaseName() { case TABLE: return TaskFactory.get(new DDLWork2(inputs, outputs, createTblDesc), conf); case VIEW: - return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf); + return TaskFactory.get(new DDLWork2(inputs, outputs, createViewDesc), conf); } return null; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java index 3abdc4859f..5d4e93e74a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java @@ -22,6 +22,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.AcidUtils; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index b668e40594..15e922ebd5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -41,6 +41,7 @@ 
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.ddl.table.CreateTableDesc;
+import org.apache.hadoop.hive.ql.ddl.table.CreateViewDesc;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
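Reviewer note on the patch as a whole: with descriptors self-registering through their `static` initializers and `DDLWork2` carrying a single `DDLDesc`, task execution presumably reduces to a reflective lookup and dispatch. A sketch under the same naming assumptions as the registry sketch earlier in these notes; the actual `DDLTask2` is not part of this diff:

```java
import java.lang.reflect.Constructor;

import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;

// Hypothetical core of DDLTask2.execute(); names and signatures are assumptions.
public final class DDLDispatchSketch {
  static int run(DDLOperationContext context, DDLDesc desc) throws HiveException {
    try {
      // Look up the operation class registered for this descriptor type
      // (see the DDLOperationRegistry sketch earlier in these notes).
      Class<?> opClass = DDLOperationRegistry.getOperation(desc.getClass());
      // Every operation in this patch declares a (DDLOperationContext, <concrete desc>) constructor.
      Constructor<?> constructor = opClass.getConstructor(DDLOperationContext.class, desc.getClass());
      DDLOperation operation = (DDLOperation) constructor.newInstance(context, desc);
      return operation.execute();
    } catch (ReflectiveOperationException e) {
      throw new HiveException(e);
    }
  }
}
```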