diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 440d761f03..3e13785b94 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -658,7 +658,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal
     LOCALMODEMAXINPUTFILES("hive.exec.mode.local.auto.input.files.max", 4,
         "When hive.exec.mode.local.auto is true, the number of tasks should less than this for local mode."),
-    DROPIGNORESNONEXISTENT("hive.exec.drop.ignorenonexistent", true,
+    DROP_IGNORES_NON_EXISTENT("hive.exec.drop.ignorenonexistent", true,
         "Do not report an error if DROP TABLE/VIEW/Index/Function specifies a non-existent table/view/function"),
     HIVEIGNOREMAPJOINHINT("hive.ignore.mapjoin.hint", true, "Ignore the mapjoin hint"),
@@ -5628,7 +5628,7 @@ private void setupSQLStdAuthWhiteList() {
   private static String getSQLStdAuthDefaultWhiteListPattern() {
     // create the default white list from list of safe config params
     // and regex list
-    String confVarPatternStr = Joiner.on("|").join(convertVarsToRegex(sqlStdAuthSafeVarNames));
+    String confVarPatternStr = Joiner.on("|").join(convertVarsToRegex(SQL_STD_AUTH_SAFE_VAR_NAMES));
     String regexPatternStr = Joiner.on("|").join(sqlStdAuthSafeVarNameRegexes);
     return regexPatternStr + "|" + confVarPatternStr;
   }
@@ -5657,75 +5657,75 @@ public ZoneId getLocalTimeZone() {
    * Default list of modifiable config parameters for sql standard authorization
    * For internal use only.
    */
-  private static final String [] sqlStdAuthSafeVarNames = new String [] {
-    ConfVars.AGGR_JOIN_TRANSPOSE.varname,
-    ConfVars.BYTESPERREDUCER.varname,
-    ConfVars.CLIENT_STATS_COUNTERS.varname,
-    ConfVars.DEFAULTPARTITIONNAME.varname,
-    ConfVars.DROPIGNORESNONEXISTENT.varname,
-    ConfVars.HIVECOUNTERGROUP.varname,
-    ConfVars.HIVEDEFAULTMANAGEDFILEFORMAT.varname,
-    ConfVars.HIVEENFORCEBUCKETMAPJOIN.varname,
-    ConfVars.HIVEENFORCESORTMERGEBUCKETMAPJOIN.varname,
-    ConfVars.HIVEEXPREVALUATIONCACHE.varname,
-    ConfVars.HIVEQUERYRESULTFILEFORMAT.varname,
-    ConfVars.HIVEHASHTABLELOADFACTOR.varname,
-    ConfVars.HIVEHASHTABLETHRESHOLD.varname,
-    ConfVars.HIVEIGNOREMAPJOINHINT.varname,
-    ConfVars.HIVELIMITMAXROWSIZE.varname,
-    ConfVars.HIVEMAPREDMODE.varname,
-    ConfVars.HIVEMAPSIDEAGGREGATE.varname,
-    ConfVars.HIVEOPTIMIZEMETADATAQUERIES.varname,
-    ConfVars.HIVEROWOFFSET.varname,
-    ConfVars.HIVEVARIABLESUBSTITUTE.varname,
-    ConfVars.HIVEVARIABLESUBSTITUTEDEPTH.varname,
-    ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME.varname,
-    ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL.varname,
-    ConfVars.HIVE_CHECK_CROSS_PRODUCT.varname,
-    ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC.varname,
-    ConfVars.HIVE_COMPAT.varname,
-    ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY.varname,
-    ConfVars.HIVE_ERROR_ON_EMPTY_PARTITION.varname,
-    ConfVars.HIVE_EXECUTION_ENGINE.varname,
-    ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE.varname,
-    ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname,
-    ConfVars.HIVE_FILE_MAX_FOOTER.varname,
-    ConfVars.HIVE_INSERT_INTO_MULTILEVEL_DIRS.varname,
-    ConfVars.HIVE_LOCALIZE_RESOURCE_NUM_WAIT_ATTEMPTS.varname,
-    ConfVars.HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES.varname,
-    ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED.varname,
-    ConfVars.HIVE_QUERY_RESULTS_CACHE_WAIT_FOR_PENDING_RESULTS.varname,
-    ConfVars.HIVE_QUOTEDID_SUPPORT.varname,
-    ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname,
-    ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS.varname,
-    ConfVars.HIVE_SCHEMA_EVOLUTION.varname,
-    ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname,
-    ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS.varname,
-    ConfVars.HIVE_SUPPORT_SPECICAL_CHARACTERS_IN_TABLE_NAMES.varname,
-    ConfVars.JOB_DEBUG_CAPTURE_STACKTRACES.varname,
-    ConfVars.JOB_DEBUG_TIMEOUT.varname,
-    ConfVars.LLAP_IO_ENABLED.varname,
-    ConfVars.LLAP_IO_USE_FILEID_PATH.varname,
-    ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname,
-    ConfVars.LLAP_EXECUTION_MODE.varname,
-    ConfVars.LLAP_AUTO_ALLOW_UBER.varname,
-    ConfVars.LLAP_AUTO_ENFORCE_TREE.varname,
-    ConfVars.LLAP_AUTO_ENFORCE_VECTORIZED.varname,
-    ConfVars.LLAP_AUTO_ENFORCE_STATS.varname,
-    ConfVars.LLAP_AUTO_MAX_INPUT.varname,
-    ConfVars.LLAP_AUTO_MAX_OUTPUT.varname,
-    ConfVars.LLAP_SKIP_COMPILE_UDF_CHECK.varname,
-    ConfVars.LLAP_CLIENT_CONSISTENT_SPLITS.varname,
-    ConfVars.LLAP_ENABLE_GRACE_JOIN_IN_LLAP.varname,
-    ConfVars.LLAP_ALLOW_PERMANENT_FNS.varname,
-    ConfVars.MAXCREATEDFILES.varname,
-    ConfVars.MAXREDUCERS.varname,
-    ConfVars.NWAYJOINREORDER.varname,
-    ConfVars.OUTPUT_FILE_EXTENSION.varname,
-    ConfVars.SHOW_JOB_FAIL_DEBUG_INFO.varname,
-    ConfVars.TASKLOG_DEBUG_TIMEOUT.varname,
-    ConfVars.HIVEQUERYID.varname,
-    ConfVars.HIVEQUERYTAG.varname,
+  private static final String[] SQL_STD_AUTH_SAFE_VAR_NAMES = new String[] {
+    ConfVars.AGGR_JOIN_TRANSPOSE.varname,
+    ConfVars.BYTESPERREDUCER.varname,
+    ConfVars.CLIENT_STATS_COUNTERS.varname,
+    ConfVars.DEFAULTPARTITIONNAME.varname,
+    ConfVars.DROP_IGNORES_NON_EXISTENT.varname,
+    ConfVars.HIVECOUNTERGROUP.varname,
+    ConfVars.HIVEDEFAULTMANAGEDFILEFORMAT.varname,
+    ConfVars.HIVEENFORCEBUCKETMAPJOIN.varname,
+    ConfVars.HIVEENFORCESORTMERGEBUCKETMAPJOIN.varname,
+    ConfVars.HIVEEXPREVALUATIONCACHE.varname,
+    ConfVars.HIVEQUERYRESULTFILEFORMAT.varname,
+    ConfVars.HIVEHASHTABLELOADFACTOR.varname,
+    ConfVars.HIVEHASHTABLETHRESHOLD.varname,
+    ConfVars.HIVEIGNOREMAPJOINHINT.varname,
+    ConfVars.HIVELIMITMAXROWSIZE.varname,
+    ConfVars.HIVEMAPREDMODE.varname,
+    ConfVars.HIVEMAPSIDEAGGREGATE.varname,
+    ConfVars.HIVEOPTIMIZEMETADATAQUERIES.varname,
+    ConfVars.HIVEROWOFFSET.varname,
+    ConfVars.HIVEVARIABLESUBSTITUTE.varname,
+    ConfVars.HIVEVARIABLESUBSTITUTEDEPTH.varname,
+    ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME.varname,
+    ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL.varname,
+    ConfVars.HIVE_CHECK_CROSS_PRODUCT.varname,
+    ConfVars.HIVE_CLI_TEZ_SESSION_ASYNC.varname,
+    ConfVars.HIVE_COMPAT.varname,
+    ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY.varname,
+    ConfVars.HIVE_ERROR_ON_EMPTY_PARTITION.varname,
+    ConfVars.HIVE_EXECUTION_ENGINE.varname,
+    ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE.varname,
+    ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname,
+    ConfVars.HIVE_FILE_MAX_FOOTER.varname,
+    ConfVars.HIVE_INSERT_INTO_MULTILEVEL_DIRS.varname,
+    ConfVars.HIVE_LOCALIZE_RESOURCE_NUM_WAIT_ATTEMPTS.varname,
+    ConfVars.HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES.varname,
+    ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED.varname,
+    ConfVars.HIVE_QUERY_RESULTS_CACHE_WAIT_FOR_PENDING_RESULTS.varname,
+    ConfVars.HIVE_QUOTEDID_SUPPORT.varname,
+    ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname,
+    ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS.varname,
+    ConfVars.HIVE_SCHEMA_EVOLUTION.varname,
+    ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname,
+    ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS.varname,
+    ConfVars.HIVE_SUPPORT_SPECICAL_CHARACTERS_IN_TABLE_NAMES.varname,
+    ConfVars.JOB_DEBUG_CAPTURE_STACKTRACES.varname,
+    ConfVars.JOB_DEBUG_TIMEOUT.varname,
+    ConfVars.LLAP_IO_ENABLED.varname,
+    ConfVars.LLAP_IO_USE_FILEID_PATH.varname,
+    ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname,
+    ConfVars.LLAP_EXECUTION_MODE.varname,
+    ConfVars.LLAP_AUTO_ALLOW_UBER.varname,
+    ConfVars.LLAP_AUTO_ENFORCE_TREE.varname,
+    ConfVars.LLAP_AUTO_ENFORCE_VECTORIZED.varname,
+    ConfVars.LLAP_AUTO_ENFORCE_STATS.varname,
+    ConfVars.LLAP_AUTO_MAX_INPUT.varname,
+    ConfVars.LLAP_AUTO_MAX_OUTPUT.varname,
+    ConfVars.LLAP_SKIP_COMPILE_UDF_CHECK.varname,
+    ConfVars.LLAP_CLIENT_CONSISTENT_SPLITS.varname,
+    ConfVars.LLAP_ENABLE_GRACE_JOIN_IN_LLAP.varname,
+    ConfVars.LLAP_ALLOW_PERMANENT_FNS.varname,
+    ConfVars.MAXCREATEDFILES.varname,
+    ConfVars.MAXREDUCERS.varname,
+    ConfVars.NWAYJOINREORDER.varname,
+    ConfVars.OUTPUT_FILE_EXTENSION.varname,
+    ConfVars.SHOW_JOB_FAIL_DEBUG_INFO.varname,
+    ConfVars.TASKLOG_DEBUG_TIMEOUT.varname,
+    ConfVars.HIVEQUERYID.varname,
+    ConfVars.HIVEQUERYTAG.varname,
   };
 
   /**
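Only the Java constants are renamed above; the underlying property key hive.exec.drop.ignorenonexistent is unchanged, so existing site configurations keep working. The reflection-based test updated at the end of this patch must use the new field name SQL_STD_AUTH_SAFE_VAR_NAMES in lockstep. A minimal sketch of reading the flag after the rename (the standalone class and fresh HiveConf are illustrative, not part of this patch):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class DropFlagExample {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Same property key as before the rename; only the ConfVars constant changed.
    boolean ignoreNonExistent = HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
    System.out.println("hive.exec.drop.ignorenonexistent = " + ignoreNonExistent);
  }
}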
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
index 6687a4b554..e1a1faba17 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
@@ -20,7 +20,6 @@
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
@@ -34,23 +33,20 @@
   private static final long serialVersionUID = 1L;
 
   private final String tableName;
-  private final TableType expectedType;
   private final boolean ifExists;
-  private final boolean ifPurge;
+  private final boolean purge;
   private final ReplicationSpec replicationSpec;
   private final boolean validationRequired;
 
-  public DropTableDesc(String tableName, TableType expectedType, boolean ifExists, boolean ifPurge,
-      ReplicationSpec replicationSpec) {
-    this(tableName, expectedType, ifExists, ifPurge, replicationSpec, true);
+  public DropTableDesc(String tableName, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) {
+    this(tableName, ifExists, ifPurge, replicationSpec, true);
   }
 
-  public DropTableDesc(String tableName, TableType expectedType, boolean ifExists, boolean ifPurge,
-      ReplicationSpec replicationSpec, boolean validationRequired) {
+  public DropTableDesc(String tableName, boolean ifExists, boolean purge, ReplicationSpec replicationSpec,
+      boolean validationRequired) {
     this.tableName = tableName;
-    this.expectedType = expectedType;
     this.ifExists = ifExists;
-    this.ifPurge = ifPurge;
+    this.purge = purge;
     this.replicationSpec = replicationSpec == null ?
        new ReplicationSpec() : replicationSpec;
     this.validationRequired = validationRequired;
   }
@@ -60,20 +56,12 @@ public String getTableName() {
     return tableName;
   }
 
-  public boolean getExpectView() {
-    return expectedType != null && expectedType == TableType.VIRTUAL_VIEW;
-  }
-
-  public boolean getExpectMaterializedView() {
-    return expectedType != null && expectedType == TableType.MATERIALIZED_VIEW;
-  }
-
-  public boolean getIfExists() {
+  public boolean isIfExists() {
     return ifExists;
   }
 
-  public boolean getIfPurge() {
-    return ifPurge;
+  public boolean isPurge() {
+    return purge;
   }
 
   /**
@@ -81,10 +69,10 @@ public boolean getIfPurge() {
    * This can result in a "DROP IF OLDER THAN" kind of semantic
    */
   public ReplicationSpec getReplicationSpec(){
-    return this.replicationSpec;
+    return replicationSpec;
   }
 
   public boolean getValidationRequired(){
-    return this.validationRequired;
+    return validationRequired;
   }
 }
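With expectedType gone, the desc carries only what the drop-table operation needs; the view and materialized-view cases move to the dedicated descs added below. A sketch of the two constructor shapes as used by the call sites updated later in this patch (the table name is a placeholder):

import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;

public class DropTableDescShapes {
  public static void main(String[] args) {
    // Four-argument form: validationRequired defaults to true.
    DropTableDesc interactive = new DropTableDesc("default.t1", false, true, new ReplicationSpec());
    // Five-argument form: replication skips validation, as DropTableHandler does below.
    DropTableDesc replication = new DropTableDesc("default.t1", true, true, new ReplicationSpec(), false);
    System.out.println(interactive.isPurge() + " / " + replication.getValidationRequired());
  }
}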
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
index 80fa4c7bf5..8d852cd40c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.hive.ql.ddl.table.creation;
 
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.ddl.DDLUtils;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.PartitionIterable;
@@ -43,54 +42,25 @@ public DropTableOperation(DDLOperationContext context, DropTableDesc desc) {
 
   @Override
   public int execute() throws HiveException {
-    Table tbl = null;
-    try {
-      tbl = context.getDb().getTable(desc.getTableName());
-    } catch (InvalidTableException e) {
-      // drop table is idempotent
+    Table table = getTable();
+    if (table == null) {
+      return 0; // dropping a non-existent table is handled by DDLSemanticAnalyzer
     }
 
-    // This is a true DROP TABLE
-    if (tbl != null && desc.getValidationRequired()) {
-      if (tbl.isView()) {
-        if (!desc.getExpectView()) {
-          if (desc.getIfExists()) {
-            return 0;
-          }
-          if (desc.getExpectMaterializedView()) {
-            throw new HiveException("Cannot drop a view with DROP MATERIALIZED VIEW");
-          } else {
-            throw new HiveException("Cannot drop a view with DROP TABLE");
-          }
-        }
-      } else if (tbl.isMaterializedView()) {
-        if (!desc.getExpectMaterializedView()) {
-          if (desc.getIfExists()) {
-            return 0;
-          }
-          if (desc.getExpectView()) {
-            throw new HiveException("Cannot drop a materialized view with DROP VIEW");
-          } else {
-            throw new HiveException("Cannot drop a materialized view with DROP TABLE");
-          }
-        }
-      } else {
-        if (desc.getExpectView()) {
-          if (desc.getIfExists()) {
-            return 0;
-          }
-          throw new HiveException("Cannot drop a base table with DROP VIEW");
-        } else if (desc.getExpectMaterializedView()) {
-          if (desc.getIfExists()) {
-            return 0;
-          }
-          throw new HiveException("Cannot drop a base table with DROP MATERIALIZED VIEW");
+    if (desc.getValidationRequired()) {
+      if (table.isView() || table.isMaterializedView()) {
+        if (desc.isIfExists()) {
+          return 0;
+        } else if (table.isView()) {
+          throw new HiveException("Cannot drop a view with DROP TABLE");
+        } else {
+          throw new HiveException("Cannot drop a materialized view with DROP TABLE");
         }
       }
     }
 
     ReplicationSpec replicationSpec = desc.getReplicationSpec();
-    if (tbl != null && replicationSpec.isInReplicationScope()) {
+    if (replicationSpec.isInReplicationScope()) {
       /**
        * DROP TABLE FOR REPLICATION behaves differently from DROP TABLE IF EXISTS - it more closely
        * matches a DROP TABLE IF OLDER THAN(x) semantic.
@@ -112,15 +82,15 @@ public int execute() throws HiveException {
        * drop the partitions inside it that are older than this event. To wit, DROP TABLE FOR REPL
        * acts like a recursive DROP TABLE IF OLDER.
        */
-      if (!replicationSpec.allowEventReplacementInto(tbl.getParameters())) {
+      if (!replicationSpec.allowEventReplacementInto(table.getParameters())) {
         // Drop occured as part of replicating a drop, but the destination
         // table was newer than the event being replicated. Ignore, but drop
         // any partitions inside that are older.
-        if (tbl.isPartitioned()) {
-          PartitionIterable partitions = new PartitionIterable(context.getDb(), tbl, null,
-              context.getConf().getIntVar(HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX));
-          for (Partition p : Iterables.filter(partitions, replicationSpec.allowEventReplacementInto())){
-            context.getDb().dropPartition(tbl.getDbName(), tbl.getTableName(), p.getValues(), true);
+        if (table.isPartitioned()) {
+          PartitionIterable partitions = new PartitionIterable(context.getDb(), table, null,
+              MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX));
+          for (Partition p : Iterables.filter(partitions, replicationSpec.allowEventReplacementInto())) {
+            context.getDb().dropPartition(table.getDbName(), table.getTableName(), p.getValues(), true);
           }
         }
         LOG.debug("DDLTask: Drop Table is skipped as table {} is newer than update", desc.getTableName());
@@ -128,18 +98,18 @@ public int execute() throws HiveException {
       }
     }
 
-    // drop the table
     // TODO: API w/catalog name
-    context.getDb().dropTable(desc.getTableName(), desc.getIfPurge());
-    if (tbl != null) {
-      // Remove from cache if it is a materialized view
-      if (tbl.isMaterializedView()) {
-        HiveMaterializedViewsRegistry.get().dropMaterializedView(tbl);
-      }
-      // We have already locked the table in DDLSemanticAnalyzer, don't do it again here
-      DDLUtils.addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK), context);
-    }
+    context.getDb().dropTable(desc.getTableName(), desc.isPurge());
+    DDLUtils.addIfAbsentByName(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK), context);
 
     return 0;
   }
+
+  private Table getTable() throws HiveException {
+    try {
+      return context.getDb().getTable(desc.getTableName());
+    } catch (InvalidTableException e) {
+      return null;
+    }
+  }
 }
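A side effect worth noting above: the partition batch size is now read through MetastoreConf instead of the removed HiveConf import. A minimal sketch of that lookup, assuming only that a Hadoop Configuration is at hand (the standalone class is illustrative; when the key is unset the variable's own default applies):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class BatchRetrieveExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolves the metastore batch-retrieve setting (or its hive.* alias), defaulting if unset.
    int batchSize = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX);
    System.out.println("batch size = " + batchSize);
  }
}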
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewDesc.java
new file mode 100644
index 0000000000..3f3f78daa4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewDesc.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.view;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for DROP MATERIALIZED VIEW commands.
+ */
+@Explain(displayName = "Drop Materialized View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DropMaterializedViewDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String viewName;
+  private final boolean ifExists;
+
+  public DropMaterializedViewDesc(String viewName, boolean ifExists) {
+    this.viewName = viewName;
+    this.ifExists = ifExists;
+  }
+
+  @Explain(displayName = "view name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTableName() {
+    return viewName;
+  }
+
+  @Explain(displayName = "if exists", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isIfExists() {
+    return ifExists;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewOperation.java
new file mode 100644
index 0000000000..de09a55ca4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropMaterializedViewOperation.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.view;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Operation to drop a materialized view.
+ */
+public class DropMaterializedViewOperation extends DDLOperation {
+  public DropMaterializedViewOperation(DDLOperationContext context, DropMaterializedViewDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    Table table = getTable();
+    if (table == null) {
+      return 0; // dropping a non-existent materialized view is handled by DDLSemanticAnalyzer
+    }
+
+    if (!table.isMaterializedView()) {
+      if (desc.isIfExists()) {
+        return 0;
+      } else if (table.isView()) {
+        throw new HiveException("Cannot drop a view with DROP MATERIALIZED VIEW");
+      } else {
+        throw new HiveException("Cannot drop a base table with DROP MATERIALIZED VIEW");
+      }
+    }
+
+    // TODO: API w/catalog name
+    context.getDb().dropTable(desc.getTableName(), false);
+    HiveMaterializedViewsRegistry.get().dropMaterializedView(table);
+    DDLUtils.addIfAbsentByName(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK), context);
+
+    return 0;
+  }
+
+  private Table getTable() throws HiveException {
+    try {
+      return context.getDb().getTable(desc.getTableName());
+    } catch (InvalidTableException e) {
+      return null;
+    }
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewDesc.java
new file mode 100644
index 0000000000..0ffb007890
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewDesc.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.view;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for DROP VIEW commands.
+ */
+@Explain(displayName = "Drop View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DropViewDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String viewName;
+  private final boolean ifExists;
+
+  public DropViewDesc(String viewName, boolean ifExists) {
+    this.viewName = viewName;
+    this.ifExists = ifExists;
+  }
+
+  @Explain(displayName = "view name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTableName() {
+    return viewName;
+  }
+
+  @Explain(displayName = "if exists", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isIfExists() {
+    return ifExists;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewOperation.java
new file mode 100644
index 0000000000..26f3dedb8f
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/DropViewOperation.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.view;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Operation to drop a view.
+ */
+public class DropViewOperation extends DDLOperation {
+  public DropViewOperation(DDLOperationContext context, DropViewDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    Table table = getTable();
+    if (table == null) {
+      return 0; // dropping a non-existent view is handled by DDLSemanticAnalyzer
+    }
+
+    if (!table.isView()) {
+      if (desc.isIfExists()) {
+        return 0;
+      } else if (table.isMaterializedView()) {
+        throw new HiveException("Cannot drop a materialized view with DROP VIEW");
+      } else {
+        throw new HiveException("Cannot drop a base table with DROP VIEW");
+      }
+    }
+
+    // TODO: API w/catalog name
+    context.getDb().dropTable(desc.getTableName(), false);
+    DDLUtils.addIfAbsentByName(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK), context);
+
+    return 0;
+  }
+
+  private Table getTable() throws HiveException {
+    try {
+      return context.getDb().getTable(desc.getTableName());
+    } catch (InvalidTableException e) {
+      return null;
+    }
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
index f238ac05c8..02993fce6a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
@@ -334,8 +334,7 @@ static TableLocationTuple tableLocation(ImportTableDesc tblDesc, Database parent
 
   private Task dropTableTask(Table table) {
     assert(table != null);
-    DropTableDesc dropTblDesc = new DropTableDesc(table.getFullyQualifiedName(), table.getTableType(),
-        true, false, event.replicationSpec());
+    DropTableDesc dropTblDesc = new DropTableDesc(table.getFullyQualifiedName(), true, false, event.replicationSpec());
     return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc), context.hiveConf);
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
index 2d53bae159..424027077a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
@@ -28,7 +28,6 @@
 
 import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -197,7 +196,7 @@ private void analyzeAcidExport(ASTNode ast) throws SemanticException {
     // Now make a task to drop temp table
     // {@link DDLSemanticAnalyzer#analyzeDropTable(ASTNode ast, TableType expectedType)
     ReplicationSpec replicationSpec = new ReplicationSpec();
-    DropTableDesc dropTblDesc = new DropTableDesc(newTableName, TableType.MANAGED_TABLE, false, true, replicationSpec);
+    DropTableDesc dropTblDesc = new DropTableDesc(newTableName, false, true, replicationSpec);
     Task dropTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc), conf);
     exportTask.addDependentTask(dropTask);
     markReadEntityForUpdate();
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index ff7f9a8583..40330434af 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -148,6 +148,8 @@
 import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableUnarchiveDesc;
 import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableAddPartitionDesc.PartitionDesc;
 import org.apache.hadoop.hive.ql.ddl.view.AlterMaterializedViewRewriteDesc;
+import org.apache.hadoop.hive.ql.ddl.view.DropMaterializedViewDesc;
+import org.apache.hadoop.hive.ql.ddl.view.DropViewDesc;
 import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterPoolAddTriggerDesc;
 import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterPoolDropTriggerDesc;
 import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterResourcePlanDesc;
@@ -399,7 +401,7 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       break;
     }
     case HiveParser.TOK_DROPTABLE:
-      analyzeDropTable(ast, null);
+      analyzeDropTable(ast);
       break;
     case HiveParser.TOK_TRUNCATETABLE:
       analyzeTruncateTable(ast);
@@ -479,10 +481,10 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       analyzeMetastoreCheck(ast);
       break;
     case HiveParser.TOK_DROPVIEW:
-      analyzeDropTable(ast, TableType.VIRTUAL_VIEW);
+      analyzeDropView(ast);
       break;
     case HiveParser.TOK_DROP_MATERIALIZED_VIEW:
-      analyzeDropTable(ast, TableType.MATERIALIZED_VIEW);
+      analyzeDropMaterializedView(ast);
       break;
     case HiveParser.TOK_ALTERVIEW: {
       String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
@@ -1459,28 +1461,51 @@ private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc)));
   }
 
-
-
-  private void analyzeDropTable(ASTNode ast, TableType expectedType)
-      throws SemanticException {
+  private void analyzeDropTable(ASTNode ast) throws SemanticException {
     String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
-    // we want to signal an error if the table/view doesn't exist and we're
-    // configured not to fail silently
-    boolean throwException =
-        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
+
+    Table table = getTable(tableName, throwException);
+    if (table != null) {
+      inputs.add(new ReadEntity(table));
+      outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_EXCLUSIVE));
+    }
+
+    boolean purge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
     ReplicationSpec replicationSpec = new ReplicationSpec(ast);
+    DropTableDesc dropTableDesc = new DropTableDesc(tableName, ifExists, purge, replicationSpec);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTableDesc)));
+  }
 
-    Table tab = getTable(tableName, throwException);
-    if (tab != null) {
-      inputs.add(new ReadEntity(tab));
-      outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE));
+  private void analyzeDropView(ASTNode ast) throws SemanticException {
+    String viewName = getUnescapedName((ASTNode) ast.getChild(0));
+    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
+    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
+
+    Table view = getTable(viewName, throwException);
+    if (view != null) {
+      inputs.add(new ReadEntity(view));
+      outputs.add(new WriteEntity(view, WriteEntity.WriteType.DDL_EXCLUSIVE));
     }
 
-    boolean ifPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
-    DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectedType, ifExists, ifPurge, replicationSpec);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc)));
+    DropViewDesc dropViewDesc = new DropViewDesc(viewName, ifExists);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropViewDesc)));
+  }
+
+  private void analyzeDropMaterializedView(ASTNode ast) throws SemanticException {
+    String viewName = getUnescapedName((ASTNode) ast.getChild(0));
+    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
+    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
+
+    Table materializedView = getTable(viewName, throwException);
+    if (materializedView != null) {
+      inputs.add(new ReadEntity(materializedView));
+      outputs.add(new WriteEntity(materializedView, WriteEntity.WriteType.DDL_EXCLUSIVE));
+    }
+
+    DropMaterializedViewDesc dropMaterializedViewDesc = new DropMaterializedViewDesc(viewName, ifExists);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropMaterializedViewDesc)));
   }
 
   private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
@@ -3335,7 +3360,7 @@ private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean
       throws SemanticException {
 
     boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
-        || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+        || HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
     // If the drop has to fail on non-existent partitions, we cannot batch expressions.
     // That is because we actually have to check each separate expression for existence.
     // We could do a small optimization for the case where expr has all columns and all
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index 7f167a204d..8dd567406c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -98,7 +98,7 @@ private void analyzeDropFunction(ASTNode ast) throws SemanticException {
     // we want to signal an error if the function doesn't exist and we're
     // configured not to ignore this
     boolean throwException =
-        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
 
     FunctionInfo info = FunctionRegistry.getFunctionInfo(functionName);
     if (info == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 0197762fce..687122a45f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -548,8 +548,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName,
 
   private static Task dropTableTask(Table table, EximUtil.SemanticAnalyzerWrapperContext x,
       ReplicationSpec replicationSpec) {
-    DropTableDesc dropTblDesc = new DropTableDesc(table.getTableName(), table.getTableType(),
-        true, false, replicationSpec);
+    DropTableDesc dropTblDesc = new DropTableDesc(table.getTableName(), true, false, replicationSpec);
     return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), dropTblDesc), x.getConf());
   }
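With the analyzer split above, each drop statement now produces its own desc type, and the type-mismatch checks run in the matching operation instead of being driven by expectedType. An illustrative sketch of the mapping (names are placeholders; in the real flow the analyzers build these from the AST, not by hand):

import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc;
import org.apache.hadoop.hive.ql.ddl.view.DropMaterializedViewDesc;
import org.apache.hadoop.hive.ql.ddl.view.DropViewDesc;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;

public class DropDescMappingExample {
  public static void main(String[] args) {
    // DROP TABLE t PURGE            -> analyzeDropTable
    DropTableDesc table = new DropTableDesc("default.t", false, true, new ReplicationSpec());
    // DROP VIEW IF EXISTS v         -> analyzeDropView
    DropViewDesc view = new DropViewDesc("default.v", true);
    // DROP MATERIALIZED VIEW mv     -> analyzeDropMaterializedView
    DropMaterializedViewDesc mv = new DropMaterializedViewDesc("default.mv", false);
    System.out.println(table.getTableName() + " " + view.getTableName() + " " + mv.getTableName());
  }
}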
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index 857a5afb54..e79512eade 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -150,7 +150,7 @@ private void analyzeDropMacro(ASTNode ast) throws SemanticException {
     // we want to signal an error if the function doesn't exist and we're
     // configured not to ignore this
     boolean throwException =
-        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
 
     // Temp macros are not allowed to have qualified names.
     if (FunctionUtils.isQualifiedFunctionName(functionName)) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
index 4a07473550..6e29d61cde 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
@@ -46,10 +46,8 @@
       actualTblName = msg.getTable();
     }
 
-    DropTableDesc dropTableDesc = new DropTableDesc(
-        actualDbName + "." + actualTblName,
-        null, true, true, context.eventOnlyReplicationSpec(), false
-    );
+    DropTableDesc dropTableDesc = new DropTableDesc(actualDbName + "." + actualTblName, true, true,
+        context.eventOnlyReplicationSpec(), false);
     Task dropTableTask = TaskFactory.get(
         new DDLWork(readEntitySet, writeEntitySet, dropTableDesc), context.hiveConf
     );
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index e29e77826b..fff0a3db0c 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -68,7 +68,7 @@ public void testDropMacroDoesNotExist() throws Exception {
 
   @Test
   public void testDropMacroExistsDoNotIgnoreErrors() throws Exception {
-    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
     FunctionRegistry.registerTemporaryUDF("SOME_MACRO", GenericUDFMacro.class);
     analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
   }
@@ -79,7 +79,7 @@ public void testDropMacro() throws Exception {
 
   @Test(expected = SemanticException.class)
   public void testDropMacroNonExistent() throws Exception {
-    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
     analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
   }
 
@@ -88,7 +88,7 @@ public void testDropMacroNonExistentWithIfExists() throws Exception {
 
   @Test
   public void testDropMacroNonExistentWithIfExistsDoNotIgnoreNonExistent() throws Exception {
-    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
     analyze(parse("DROP TEMPORARY MACRO IF EXISTS SOME_MACRO"));
   }
 
   @Test
diff --git ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
index 59a52a98a1..b087d3beab 100644
--- ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
+++ ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
@@ -81,7 +81,7 @@ private HiveConf newAuthEnabledConf() {
 
   private List<String> getSettableParams() throws SecurityException, NoSuchFieldException,
       IllegalArgumentException, IllegalAccessException {
     // get all the variable names being converted to regex in HiveConf, using reflection
-    Field varNameField = HiveConf.class.getDeclaredField("sqlStdAuthSafeVarNames");
+    Field varNameField = HiveConf.class.getDeclaredField("SQL_STD_AUTH_SAFE_VAR_NAMES");
     varNameField.setAccessible(true);
 
     List<String> confVarList = Arrays.asList((String[]) varNameField.get(null));
diff --git ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index 52cde21b1c..a0a47f8f48 100644
--- ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -644,7 +644,7 @@ PREHOOK: type: DROPVIEW
 POSTHOOK: query: explain analyze drop view v_n5
 POSTHOOK: type: DROPVIEW
 Stage-0
-  Drop Table{"table:":"v_n5"}
+  Drop View{"view name:":"v_n5"}
 
 PREHOOK: query: create view v_n5 as
 with cte as (select * from src order by key limit 5)
 select * from cte
diff --git ql/src/test/results/clientpositive/tez/explainuser_3.q.out ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 4d58f5e5ee..7892bafc54 100644
--- ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -522,7 +522,7 @@ PREHOOK: type: DROPVIEW
 POSTHOOK: query: explain drop view v_n1
 POSTHOOK: type: DROPVIEW
 Stage-0
-  Drop Table{"table:":"v_n1"}
+  Drop View{"view name:":"v_n1"}
 
 PREHOOK: query: explain create view v_n1 as
 with cte as (select * from src order by key limit 5)
 select * from cte