diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java
index 026f251935..d070a0ddb8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java
@@ -21,8 +21,18 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -33,6 +43,9 @@
  * tableName command partitionSpec?
  */
 public abstract class AbstractAlterTableAnalyzer extends BaseSemanticAnalyzer {
+  // Equivalent to acidSinks, but for DDL operations that change data.
+  private DDLDescWithWriteId ddlDescWithWriteId;
+
   public AbstractAlterTableAnalyzer(QueryState queryState) throws SemanticException {
     super(queryState);
   }
@@ -61,4 +74,125 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
 
   protected abstract void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
       throws SemanticException;
+
+  protected void setAcidDdlDesc(DDLDescWithWriteId descWithWriteId) {
+    if(this.ddlDescWithWriteId != null) {
+      throw new IllegalStateException("ddlDescWithWriteId is already set: " + this.ddlDescWithWriteId);
+    }
+    this.ddlDescWithWriteId = descWithWriteId;
+  }
+
+  @Override
+  public DDLDescWithWriteId getAcidDdlDesc() {
+    return ddlDescWithWriteId;
+  }
+
+  protected void addInputsOutputsAlterTable(TableName tableName, Map<String, String> partitionSpec,
+      AbstractAlterTableDesc desc, AlterTableType op, boolean doForceExclusive) throws SemanticException {
+    boolean isCascade = desc != null && desc.isCascade();
+    boolean alterPartitions = partitionSpec != null && !partitionSpec.isEmpty();
+    //cascade only occurs at table level then cascade to partition level
+    if (isCascade && alterPartitions) {
+      throw new SemanticException(ErrorMsg.ALTER_TABLE_PARTITION_CASCADE_NOT_SUPPORTED, op.getName());
+    }
+
+    Table table = getTable(tableName, true);
+    // cascade only occurs with partitioned table
+    if (isCascade && !table.isPartitioned()) {
+      throw new SemanticException(ErrorMsg.ALTER_TABLE_NON_PARTITIONED_TABLE_CASCADE_NOT_SUPPORTED);
+    }
+
+    // Determine the lock type to acquire
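+    // (Illustration, assumed behavior: most metadata-only alters map to a shared DDL lock via
+    // WriteEntity.determineAlterTableWriteType below, while e.g. turning a table transactional
+    // forces DDL_EXCLUSIVE.)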
+    WriteEntity.WriteType writeType = doForceExclusive ?
+        WriteType.DDL_EXCLUSIVE : determineAlterTableWriteType(table, desc, op);
+
+    if (!alterPartitions) {
+      inputs.add(new ReadEntity(table));
+      WriteEntity alterTableOutput = new WriteEntity(table, writeType);
+      outputs.add(alterTableOutput);
+      //do not need the lock for partitions since they are covered by the table lock
+      if (isCascade) {
+        for (Partition part : getPartitions(table, partitionSpec, false)) {
+          outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK));
+        }
+      }
+    } else {
+      ReadEntity re = new ReadEntity(table);
+      // In the case of altering a table for its partitions we don't need to lock the table
+      // itself, just the partitions. But the table will have a ReadEntity. So mark that
+      // ReadEntity as no lock.
+      re.noLockNeeded();
+      inputs.add(re);
+
+      if (AlterTableUtils.isFullPartitionSpec(table, partitionSpec)) {
+        // Fully specified partition spec
+        Partition part = getPartition(table, partitionSpec, true);
+        outputs.add(new WriteEntity(part, writeType));
+      } else {
+        // Partial partition spec supplied. Make sure this is allowed.
+        if (!AlterTableType.SUPPORT_PARTIAL_PARTITION_SPEC.contains(op)) {
+          throw new SemanticException(
+              ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, op.getName());
+        } else if (!conf.getBoolVar(HiveConf.ConfVars.DYNAMICPARTITIONING)) {
+          throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_DISABLED);
+        }
+
+        for (Partition part : getPartitions(table, partitionSpec, true)) {
+          outputs.add(new WriteEntity(part, writeType));
+        }
+      }
+    }
+
+    if (desc != null) {
+      validateAlterTableType(table, op, desc.expectView());
+    }
+  }
+
+  // For the time while all the alter table operations are getting migrated there is a duplication of this method here
+  private WriteType determineAlterTableWriteType(Table tab, AbstractAlterTableDesc desc, AlterTableType op) {
+    boolean convertingToAcid = false;
+    if (desc != null && desc.getProps() != null &&
+        Boolean.parseBoolean(desc.getProps().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL))) {
+      convertingToAcid = true;
+    }
+    if (!AcidUtils.isTransactionalTable(tab) && convertingToAcid) {
+      // non-acid to transactional conversion (property itself) must be mutexed to prevent concurrent writes.
+      // See HIVE-16688 for use cases.
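+      // (Illustration: without the exclusive lock, an ALTER TABLE ... SET TBLPROPERTIES
+      // ('transactional'='true') could race with an in-flight writer and end up with files
+      // that lack ACID metadata.)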
+      return WriteType.DDL_EXCLUSIVE;
+    }
+    return WriteEntity.determineAlterTableWriteType(op);
+  }
+
+  private void validateAlterTableType(Table tbl, AlterTableType op) throws SemanticException {
+    validateAlterTableType(tbl, op, false);
+  }
+
+  private void validateAlterTableType(Table tbl, AlterTableType op, boolean expectView)
+      throws SemanticException {
+    if (tbl.isView()) {
+      if (!expectView) {
+        throw new SemanticException(ErrorMsg.ALTER_COMMAND_FOR_VIEWS.getMsg());
+      }
+
+      switch (op) {
+      case ADDPARTITION:
+      case DROPPARTITION:
+      case RENAMEPARTITION:
+      case ADDPROPS:
+      case DROPPROPS:
+      case RENAME:
+        // allow this form
+        break;
+      default:
+        throw new SemanticException(ErrorMsg.ALTER_VIEW_DISALLOWED_OP.getMsg(op.toString()));
+      }
+    } else {
+      if (expectView) {
+        throw new SemanticException(ErrorMsg.ALTER_COMMAND_FOR_TABLES.getMsg());
+      }
+    }
+    if (tbl.isNonNative() && !AlterTableType.NON_NATIVE_TABLE_ALLOWED.contains(op)) {
+      throw new SemanticException(ErrorMsg.ALTER_TABLE_NON_NATIVE.getMsg(tbl.getTableName()));
+    }
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
index 2681dc5330..9a385ff334 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
@@ -88,7 +88,7 @@ public int execute() throws HiveException {
       throws HiveException {
     List<Partition> partitions = null;
     if (partSpec != null) {
-      if (DDLSemanticAnalyzer.isFullSpec(tbl, partSpec)) {
+      if (AlterTableUtils.isFullPartitionSpec(tbl, partSpec)) {
         partitions = new ArrayList<Partition>();
         Partition part = context.getDb().getPartition(tbl, partSpec, false);
         if (part == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java
index 4e76536a81..363692825c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -70,4 +71,13 @@ public static boolean isSchemaEvolutionEnabled(Table table, Configuration conf)
     return AcidUtils.isTablePropertyTransactional(table.getMetadata()) ||
         HiveConf.getBoolVar(conf, ConfVars.HIVE_SCHEMA_EVOLUTION);
   }
+
+  public static boolean isFullPartitionSpec(Table table, Map<String, String> partitionSpec) {
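+    // e.g. for a table partitioned by (ds, hr): {ds=2020-01-01, hr=01} is a full spec,
+    // while {ds=2020-01-01} is only partial (hypothetical values, for illustration).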
+    for (FieldSchema partitionCol : table.getPartCols()) {
+      if (partitionSpec.get(partitionCol.getName()) == null) {
+        return false;
+      }
+    }
+    return true;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsAnalyzer.java
new file mode 100644
index 0000000000..937486d255
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsAnalyzer.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column.add;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for add columns commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTERTABLE_ADDCOLS)
+public class AlterTableAddColumnsAnalyzer extends AbstractAlterTableAnalyzer {
+  public AlterTableAddColumnsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
+      throws SemanticException {
+    List<FieldSchema> newCols = getColumns((ASTNode) command.getChild(0));
+    boolean isCascade = false;
+    if (null != command.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
+      isCascade = true;
+    }
+
+    AlterTableAddColumnsDesc desc = new AlterTableAddColumnsDesc(tableName, partitionSpec, isCascade, newCols);
+    Table table = getTable(tableName, true);
+    if (AcidUtils.isTransactionalTable(table)) {
+      setAcidDdlDesc(desc);
+    }
+
+    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsDesc.java
index 07507c177d..a17b6aa952 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.add;
 
 import java.util.List;
 import java.util.Map;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsOperation.java
index ea62519949..57bb7229a5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/AlterTableAddColumnsOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.add;
 
 import java.util.List;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/package-info.java
new file mode 100644
index 0000000000..3c94b36545
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/add/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Add columns DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.table.column.add;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnAnalyzer.java
new file mode 100644
index 0000000000..af4039ad01
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnAnalyzer.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column.change;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.SkewedInfo;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints;
+import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Analyzer for change columns commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTERTABLE_RENAMECOL)
+public class AlterTableChangeColumnAnalyzer extends AbstractAlterTableAnalyzer {
+  public AlterTableChangeColumnAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
+      throws SemanticException {
+    //col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
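+    // e.g. (illustrative): ALTER TABLE t CHANGE COLUMN c1 c2 BIGINT COMMENT 'renamed' AFTER c0 CASCADE;
+    // child(0) is the old name, child(1) the new name, child(2) the type; the rest is scanned below.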
+    String oldColumnName = command.getChild(0).getText();
+    String newColumnName = command.getChild(1).getText();
+    String newType = getTypeStringFromAST((ASTNode) command.getChild(2));
+
+    Table table = getTable(tableName);
+    SkewedInfo skewInfo = table.getTTable().getSd().getSkewedInfo();
+    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) &&
+        skewInfo.getSkewedColNames().contains(oldColumnName)) {
+      throw new SemanticException(oldColumnName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
+    }
+
+    String newComment = null;
+    boolean first = false;
+    String flagCol = null;
+    boolean isCascade = false;
+    ASTNode constraintChild = null;
+    for (int i = 3; i < command.getChildCount(); i++) {
+      ASTNode child = (ASTNode)command.getChild(i);
+      switch (child.getToken().getType()) {
+      case HiveParser.StringLiteral:
+        newComment = unescapeSQLString(child.getText());
+        break;
+      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+        flagCol = unescapeIdentifier(child.getChild(0).getText());
+        break;
+      case HiveParser.KW_FIRST:
+        first = true;
+        break;
+      case HiveParser.TOK_CASCADE:
+        isCascade = true;
+        break;
+      case HiveParser.TOK_RESTRICT:
+        break;
+      default:
+        constraintChild = child;
+      }
+    }
+
+    Constraints constraints = getConstraints(tableName, command, newColumnName, table, constraintChild);
+
+    AlterTableChangeColumnDesc desc = new AlterTableChangeColumnDesc(tableName, partitionSpec, isCascade, constraints,
+        unescapeIdentifier(oldColumnName), unescapeIdentifier(newColumnName), newType, newComment, first, flagCol);
+    if (AcidUtils.isTransactionalTable(table)) {
+      // Note: we might actually need it only when certain changes (e.g. name or type?) are made.
+      setAcidDdlDesc(desc);
+    }
+
+    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+
+  private Constraints getConstraints(TableName tableName, ASTNode command, String newColumnName, Table table,
+      ASTNode constraintChild) throws SemanticException {
+    List<SQLPrimaryKey> primaryKeys = null;
+    List<SQLForeignKey> foreignKeys = null;
+    List<SQLUniqueConstraint> uniqueConstraints = null;
+    List<SQLNotNullConstraint> notNullConstraints = null;
+    List<SQLDefaultConstraint> defaultConstraints = null;
+    List<SQLCheckConstraint> checkConstraints = null;
+    if (constraintChild != null) {
+      // Process column constraint
+      switch (constraintChild.getToken().getType()) {
+      case HiveParser.TOK_CHECK_CONSTRAINT:
+        checkConstraints = new ArrayList<>();
+        ConstraintsUtils.processCheckConstraints(tableName, constraintChild, ImmutableList.of(newColumnName),
+            checkConstraints, (ASTNode) command.getChild(2), this.ctx.getTokenRewriteStream());
+        break;
+      case HiveParser.TOK_DEFAULT_VALUE:
+        defaultConstraints = new ArrayList<>();
+        ConstraintsUtils.processDefaultConstraints(tableName, constraintChild, ImmutableList.of(newColumnName),
+            defaultConstraints, (ASTNode) command.getChild(2), this.ctx.getTokenRewriteStream());
+        break;
+      case HiveParser.TOK_NOT_NULL:
+        notNullConstraints = new ArrayList<>();
+        ConstraintsUtils.processNotNullConstraints(tableName, constraintChild, ImmutableList.of(newColumnName),
+            notNullConstraints);
+        break;
+      case HiveParser.TOK_UNIQUE:
+        uniqueConstraints = new ArrayList<>();
+        ConstraintsUtils.processUniqueConstraints(tableName, constraintChild, ImmutableList.of(newColumnName),
+            uniqueConstraints);
+        break;
+      case HiveParser.TOK_PRIMARY_KEY:
+        primaryKeys = new ArrayList<>();
+        ConstraintsUtils.processPrimaryKeys(tableName, constraintChild, ImmutableList.of(newColumnName), primaryKeys);
+        break;
+      case HiveParser.TOK_FOREIGN_KEY:
+        foreignKeys = new ArrayList<>();
+        ConstraintsUtils.processForeignKeys(tableName, constraintChild, foreignKeys);
+        break;
+      default:
+        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
+            constraintChild.getToken().getText()));
+      }
+    }
+
+    /* Validate the operation of renaming a column name. */
+    if (checkConstraints != null && !checkConstraints.isEmpty()) {
+      ConstraintsUtils.validateCheckConstraint(table.getCols(), checkConstraints, ctx.getConf());
+    }
+
+    if (table.getTableType() == TableType.EXTERNAL_TABLE &&
+        ConstraintsUtils.hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
+      throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg(
+          "Constraints are disallowed with External tables. Only RELY is allowed."));
+    }
+
+    return new Constraints(primaryKeys, foreignKeys, notNullConstraints, uniqueConstraints, defaultConstraints,
+        checkConstraints);
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnDesc.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnDesc.java
index 43943f5242..ee90efd443 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnDesc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.change;
 
 import java.util.Map;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnOperation.java
index a775a611da..806e3eb416 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/AlterTableChangeColumnOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.change;
 
 import java.util.ArrayList;
 import java.util.List;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/package-info.java
new file mode 100644
index 0000000000..fc08eef57d
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/change/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Change columns DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.table.column.change;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsAnalyzer.java
new file mode 100644
index 0000000000..eaebf117f7
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsAnalyzer.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column.replace;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for replace columns commands.
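+ * (Per Hive's documented semantics, REPLACE COLUMNS removes all existing columns and adds the
+ * new set; it is only supported for tables with native SerDes.)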
+ */
+@DDLType(type=HiveParser.TOK_ALTERTABLE_REPLACECOLS)
+public class AlterTableReplaceColumnsAnalyzer extends AbstractAlterTableAnalyzer {
+  public AlterTableReplaceColumnsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
+      throws SemanticException {
+    List<FieldSchema> newCols = getColumns((ASTNode) command.getChild(0));
+    boolean isCascade = false;
+    if (null != command.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
+      isCascade = true;
+    }
+
+    AlterTableReplaceColumnsDesc desc = new AlterTableReplaceColumnsDesc(tableName, partitionSpec, isCascade, newCols);
+    Table table = getTable(tableName, true);
+    if (AcidUtils.isTransactionalTable(table)) {
+      setAcidDdlDesc(desc);
+    }
+
+    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsDesc.java
index 3600084614..8ba280d452 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsDesc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.replace;
 
 import java.util.List;
 import java.util.Map;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsOperation.java
index 599de64e1b..e46d2b5158 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/AlterTableReplaceColumnsOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.replace;
 
 import java.util.List;
 import java.util.Set;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/package-info.java
similarity index 86%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/package-info.java
index 447d61b794..dd75ddaf50 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/replace/package-info.java
@@ -16,5 +16,5 @@
  * limitations under the License.
  */
 
-/** Table column related DDL operation descriptions and operations. */
-package org.apache.hadoop.hive.ql.ddl.table.column;
+/** Replace columns DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.table.column.replace;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsAnalyzer.java
new file mode 100644
index 0000000000..082114a003
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsAnalyzer.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column.show;
+
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show columns commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOWCOLUMNS)
+public class ShowColumnsAnalyzer extends BaseSemanticAnalyzer {
+  public ShowColumnsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    // table name has to be present so min child 1 and max child 4
+    if (root.getChildCount() > 4 || root.getChildCount() < 1) {
+      throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(root.toStringTree()));
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String tableName = getUnescapedName((ASTNode) root.getChild(0));
+    String pattern = null;
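+    // The four accepted shapes (assumed from the grammar): SHOW COLUMNS IN tbl;
+    // SHOW COLUMNS IN tbl 'pattern'; SHOW COLUMNS IN tbl IN db; SHOW COLUMNS IN tbl IN db 'pattern'.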
+    switch (root.getChildCount()) {
+    case 1: // only tablename no pattern and db
+      break;
+    case 2: // tablename and pattern
+      pattern = unescapeSQLString(root.getChild(1).getText());
+      break;
+    case 3: // specifies db
+      if (tableName.contains(".")) {
+        throw new SemanticException("Duplicates declaration for database name");
+      }
+      tableName = getUnescapedName((ASTNode) root.getChild(2)) + "." + tableName;
+      break;
+    case 4: // specifies db and pattern
+      if (tableName.contains(".")) {
+        throw new SemanticException("Duplicates declaration for database name");
+      }
+      tableName = getUnescapedName((ASTNode) root.getChild(2)) + "." + tableName;
+      pattern = unescapeSQLString(root.getChild(3).getText());
+      break;
+    default:
+      break;
+    }
+
+    Table table = getTable(tableName);
+    inputs.add(new ReadEntity(table));
+
+    ShowColumnsDesc desc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowColumnsDesc.SCHEMA));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsDesc.java
similarity index 93%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsDesc.java
index 6d89bd061d..bf7dc06ae7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.show;
 
 import java.io.Serializable;
@@ -38,10 +38,6 @@
   private final String tableName;
   private final String pattern;
 
-  public ShowColumnsDesc(Path resFile, String tableName) {
-    this(resFile, tableName, null);
-  }
-
   public ShowColumnsDesc(Path resFile, String tableName, String pattern) {
     this.resFile = resFile.toString();
     this.pattern = pattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java
index 0ba0a191d6..ee98f605fc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.show;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/package-info.java
new file mode 100644
index 0000000000..e911352d25
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Show columns DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.table.column.show;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsAnalyzer.java
new file mode 100644
index 0000000000..db4e5229c7
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsAnalyzer.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column.update;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for update columns commands.
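+ * (Assumed intent: for tables whose schema is supplied by the SerDe, this syncs the column list
+ * stored in the Metastore with what the SerDe currently reports.)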
+ */
+@DDLType(type=HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS)
+public class AlterTableUpdateColumnsAnalyzer extends AbstractAlterTableAnalyzer {
+  public AlterTableUpdateColumnsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
+      throws SemanticException {
+    boolean isCascade = (null != command.getFirstChildWithType(HiveParser.TOK_CASCADE));
+
+    AlterTableUpdateColumnsDesc desc = new AlterTableUpdateColumnsDesc(tableName, partitionSpec, isCascade);
+    Table table = getTable(tableName);
+    if (AcidUtils.isTransactionalTable(table)) {
+      setAcidDdlDesc(desc);
+    }
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsDesc.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsDesc.java
index edd3045237..989b3cb76e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.update;
 
 import java.util.Map;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsOperation.java
index 65054a3b8e..c84d6d700c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/AlterTableUpdateColumnsOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table.column;
+package org.apache.hadoop.hive.ql.ddl.table.column.update;
 
 import java.util.Collection;
 import java.util.List;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/package-info.java
new file mode 100644
index 0000000000..c330aebac5
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/update/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Update columns DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.table.column.update;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java
index ef6929d062..69a40ddf53 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java
@@ -20,10 +20,13 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.antlr.runtime.TokenRewriteStream;
 import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
@@ -31,16 +34,21 @@
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
+import org.apache.hadoop.hive.ql.parse.UnparseTranslator;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -417,4 +425,104 @@ public static void processForeignKeys(TableName tableName, ASTNode node, List<SQLForeignKey> foreignKeys)
+
+  public static void validateCheckConstraint(List<FieldSchema> cols, List<SQLCheckConstraint> checkConstraints,
+      Configuration conf) throws SemanticException{
+    // create colinfo and then row resolver
+    RowResolver rr = new RowResolver();
+    for (FieldSchema col : cols) {
+      ColumnInfo ci = new ColumnInfo(col.getName(),TypeInfoUtils.getTypeInfoFromTypeString(col.getType()), null, false);
+      rr.put(null, col.getName(), ci);
+    }
+
+    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(rr);
+    // TypeCheckProcFactor expects typecheckctx to have unparse translator
+    UnparseTranslator unparseTranslator = new UnparseTranslator(conf);
+    typeCheckCtx.setUnparseTranslator(unparseTranslator);
+    for (SQLCheckConstraint cc : checkConstraints) {
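+      // e.g. a column constraint CHECK (price > 0) arrives here with check_expression "price > 0"
+      // (hypothetical example); it is parsed and type-checked below to ensure a boolean result.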
+      try {
+        ParseDriver parseDriver = new ParseDriver();
+        ASTNode checkExprAST = parseDriver.parseExpression(cc.getCheck_expression());
+        validateCheckExprAST(checkExprAST);
+        Map<ASTNode, ExprNodeDesc> genExprs = TypeCheckProcFactory.genExprNode(checkExprAST, typeCheckCtx);
+        ExprNodeDesc checkExpr = genExprs.get(checkExprAST);
+        if (checkExpr == null) {
+          throw new SemanticException(
+              ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type for CHECK constraint: ") + cc.getCheck_expression());
+        }
+        if (checkExpr.getTypeInfo().getTypeName() != serdeConstants.BOOLEAN_TYPE_NAME) {
+          throw new SemanticException(
+              ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Only boolean type is supported for CHECK constraint: ") +
+                  cc.getCheck_expression() + ". Found: " + checkExpr.getTypeInfo().getTypeName());
+        }
+        validateCheckExpr(checkExpr);
+      } catch (Exception e) {
+        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid CHECK constraint expression: ") +
+            cc.getCheck_expression() + ". " + e.getMessage(), e);
+      }
+    }
+  }
+
+  // given an ast node this method recursively goes over checkExpr ast. If it finds a node of type TOK_SUBQUERY_EXPR
+  // it throws an error.
+  // This method is used to validate check expression since check expression isn't allowed to have subquery
+  private static void validateCheckExprAST(ASTNode checkExpr) throws SemanticException {
+    if (checkExpr == null) {
+      return;
+    }
+    if (checkExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) {
+      throw new SemanticException(
+          ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Subqueries are not allowed in Check Constraints"));
+    }
+    for (int i = 0; i < checkExpr.getChildCount(); i++) {
+      validateCheckExprAST((ASTNode)checkExpr.getChild(i));
+    }
+  }
+
+  // recursively go through expression and make sure the following:
+  // * If expression is UDF it is not permanent UDF
+  private static void validateCheckExpr(ExprNodeDesc checkExpr) throws SemanticException {
+    if (checkExpr instanceof ExprNodeGenericFuncDesc) {
+      ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc)checkExpr;
+      boolean isBuiltIn = FunctionRegistry.isBuiltInFuncExpr(funcDesc);
+      boolean isPermanent = FunctionRegistry.isPermanentFunction(funcDesc);
+      if (!isBuiltIn && !isPermanent) {
+        throw new SemanticException(
+            ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Temporary UDFs are not allowed in Check Constraints"));
+      }
+
+      if (FunctionRegistry.impliesOrder(funcDesc.getFuncText())) {
+        throw new SemanticException(
+            ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Window functions are not allowed in Check Constraints"));
+      }
+    }
+
+    if (checkExpr.getChildren() == null) {
+      return;
+    }
+    for (ExprNodeDesc childExpr:checkExpr.getChildren()) {
+      validateCheckExpr(childExpr);
+    }
+  }
+
+  public static boolean hasEnabledOrValidatedConstraints(List<SQLNotNullConstraint> notNullConstraints,
+      List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) {
+    if (notNullConstraints != null) {
+      for (SQLNotNullConstraint nnC : notNullConstraints) {
+        if (nnC.isEnable_cstr() || nnC.isValidate_cstr()) {
+          return true;
+        }
+      }
+    }
+
+    if (defaultConstraints != null && !defaultConstraints.isEmpty()) {
+      return true;
+    }
+
+    if (checkConstraints != null && !checkConstraints.isEmpty()) {
+      return true;
+    }
+
+    return false;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index da99fae2bb..32edabccf3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -64,9 +64,7 @@
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -85,7 +83,6 @@
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
@@ -691,108 +688,6 @@ private static String spliceString(String str, int i, int length, String replace
         new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), conf);
   }
 
-  // given an ast node this method recursively goes over checkExpr ast. If it finds a node of type TOK_SUBQUERY_EXPR
-  // it throws an error.
-  // This method is used to validate check expression since check expression isn't allowed to have subquery
-  private static void validateCheckExprAST(ASTNode checkExpr) throws SemanticException {
-    if(checkExpr == null) {
-      return;
-    }
-    if(checkExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) {
-      throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Subqueries are not allowed "
-          + "in Check Constraints"));
-    }
-    for(int i=0; i<checkExpr.getChildCount(); i++) {
-      validateCheckExprAST((ASTNode)checkExpr.getChild(i));
-    }
-  }
-
-  // recursively go through expression and make sure the following:
-  // * If expression is UDF it is not permanent UDF
-  private static void validateCheckExpr(ExprNodeDesc checkExpr) throws SemanticException {
-    if(checkExpr instanceof ExprNodeGenericFuncDesc){
-      ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc)checkExpr;
-      boolean isBuiltIn = FunctionRegistry.isBuiltInFuncExpr(funcDesc);
-      boolean isPermanent = FunctionRegistry.isPermanentFunction(funcDesc);
-      if(!isBuiltIn && !isPermanent) {
-        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Temporary UDFs are not allowed "
-            + "in Check Constraints"));
-      }
-      if (FunctionRegistry.impliesOrder(funcDesc.getFuncText())) {
-        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Window functions are not allowed "
-            + "in Check Constraints"));
-      }
-    }
-    if(checkExpr.getChildren() == null) {
-      return;
-    }
-    for(ExprNodeDesc childExpr:checkExpr.getChildren()) {
-      validateCheckExpr(childExpr);
-    }
-  }
-
-  protected void validateCheckConstraint(List<FieldSchema> cols, List<SQLCheckConstraint> checkConstraints,
-      Configuration conf)
-      throws SemanticException{
-
-    // create colinfo and then row resolver
-    RowResolver rr = new RowResolver();
-    for(FieldSchema col: cols) {
-      ColumnInfo ci = new ColumnInfo(col.getName(),TypeInfoUtils.getTypeInfoFromTypeString(col.getType()),
-          null, false);
-      rr.put(null, col.getName(), ci);
-    }
-
-    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(rr);
-    // TypeCheckProcFactor expects typecheckctx to have unparse translator
-    UnparseTranslator unparseTranslator = new UnparseTranslator(conf);
-    typeCheckCtx.setUnparseTranslator(unparseTranslator);
-    for(SQLCheckConstraint cc:checkConstraints) {
-      try {
-        ParseDriver parseDriver = new ParseDriver();
-        ASTNode checkExprAST = parseDriver.parseExpression(cc.getCheck_expression());
-        validateCheckExprAST(checkExprAST);
-        Map<ASTNode, ExprNodeDesc> genExprs = TypeCheckProcFactory
-            .genExprNode(checkExprAST, typeCheckCtx);
-        ExprNodeDesc checkExpr = genExprs.get(checkExprAST);
-        if(checkExpr == null) {
-          throw new SemanticException(
-              ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type for CHECK constraint: ")
-              + cc.getCheck_expression());
-        }
-        if(checkExpr.getTypeInfo().getTypeName() != serdeConstants.BOOLEAN_TYPE_NAME) {
-          throw new SemanticException(
-              ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Only boolean type is supported for CHECK constraint: ")
-              + cc.getCheck_expression() + ". Found: " + checkExpr.getTypeInfo().getTypeName());
-        }
-        validateCheckExpr(checkExpr);
-      } catch(Exception e) {
-        throw new SemanticException(
-            ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid CHECK constraint expression: ")
-            + cc.getCheck_expression() + ". " + e.getMessage(), e);
" + e.getMessage(), e); - } - } - } - - protected boolean hasEnabledOrValidatedConstraints(List notNullConstraints, - List defaultConstraints, - List checkConstraints){ - if(notNullConstraints != null) { - for (SQLNotNullConstraint nnC : notNullConstraints) { - if (nnC.isEnable_cstr() || nnC.isValidate_cstr()) { - return true; - } - } - } - if(defaultConstraints!= null && !defaultConstraints.isEmpty()) { - return true; - } - if(checkConstraints!= null && !checkConstraints.isEmpty()) { - return true; - } - return false; - } - public static void checkColumnName(String columnName) throws SemanticException { if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(columnName.toUpperCase())) { throw new SemanticException(ErrorMsg.INVALID_COLUMN_NAME.getMsg(columnName)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 20b0ccd94b..67ac0387ac 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -48,13 +48,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; -import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; -import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; -import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.Driver; @@ -66,13 +59,6 @@ import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; -import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableAddColumnsDesc; -import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableChangeColumnDesc; -import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableReplaceColumnsDesc; -import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableUpdateColumnsDesc; -import org.apache.hadoop.hive.ql.ddl.table.column.ShowColumnsDesc; -import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; -import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils; import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc; import org.apache.hadoop.hive.ql.ddl.table.info.ShowTablePropertiesDesc; import org.apache.hadoop.hive.ql.ddl.table.info.ShowTableStatusDesc; @@ -163,7 +149,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; /** @@ -175,7 +160,6 @@ private static final Map TokenToTypeName = new HashMap(); private final Set reservedPartitionValues; - private WriteEntity alterTableOutput; // Equivalent to acidSinks, but for DDL operations that change data. 
   private DDLDescWithWriteId ddlDescWithWriteId;
 
@@ -281,12 +265,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
         analyzeAlterTableArchive(tName, ast, false);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) {
         analyzeAlterTableArchive(tName, ast, true);
-      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
-        analyzeAlterTableAddCols(tName, ast, partSpec);
-      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
-        analyzeAlterTableReplaceCols(tName, ast, partSpec);
-      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
-        analyzeAlterTableRenameCol(tName, ast, partSpec);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
         analyzeAlterTableAddParts(tName, ast, false);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) {
@@ -333,8 +311,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS ||
           ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_UPDATECOLSTATS){
         analyzeAlterTableUpdateStats(ast, tName, partSpec);
-      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) {
-        analyzeAlterTableUpdateColumns(ast, tName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_OWNER) {
         analyzeAlterTableOwner(ast, tName);
       }
@@ -351,10 +327,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowTables(ast);
       break;
-    case HiveParser.TOK_SHOWCOLUMNS:
-      ctx.setResFile(ctx.getLocalTmpPath());
-      analyzeShowColumns(ast);
-      break;
     case HiveParser.TOK_SHOW_TABLESTATUS:
       ctx.setResFile(ctx.getLocalTmpPath());
      analyzeShowTableStatus(ast);
      break;
@@ -985,7 +957,7 @@ private void addInputsOutputsAlterTable(TableName tableName, Map<String, String> partSpec,
     if (!alterPartitions) {
       inputs.add(new ReadEntity(tab));
-      alterTableOutput = new WriteEntity(tab, writeType);
+      WriteEntity alterTableOutput = new WriteEntity(tab, writeType);
       outputs.add(alterTableOutput);
       //do not need the lock for partitions since they are covered by the table lock
       if (isCascade) {
@@ -1273,23 +1245,6 @@ private void analyzeAlterTableCompact(ASTNode ast, TableName tableName,
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
-  private void analyzeAlterTableUpdateColumns(ASTNode ast, TableName tableName,
-      Map<String, String> partSpec) throws SemanticException {
-
-    boolean isCascade = false;
-    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
-      isCascade = true;
-    }
-
-    AlterTableUpdateColumnsDesc alterTblDesc = new AlterTableUpdateColumnsDesc(tableName, partSpec, isCascade);
-    Table tbl = getTable(tableName);
-    if (AcidUtils.isTransactionalTable(tbl)) {
-      setAcidDdlDesc(alterTblDesc);
-    }
-
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
-  }
-
   /**
    * Utility class to resolve QualifiedName
    */
@@ -1573,50 +1528,6 @@ private void analyzeShowTables(ASTNode ast) throws SemanticException {
     setFetchTask(createFetchTask(showTblsDesc.getSchema()));
   }
 
-  private void analyzeShowColumns(ASTNode ast) throws SemanticException {
-
-    // table name has to be present so min child 1 and max child 4
-    if (ast.getChildCount() > 4 || ast.getChildCount()<1) {
-      throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(ast.toStringTree()));
-    }
-
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-
-    ShowColumnsDesc showColumnsDesc = null;
-    String pattern = null;
-    switch(ast.getChildCount()) {
-    case 1: // only tablename no pattern and db
-      showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
-      break;
-    case 2: // tablename and pattern
-      pattern = unescapeSQLString(ast.getChild(1).getText());
-      showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern);
-      break;
-    case 3: // specifies db
-      if (tableName.contains(".")) {
-        throw new SemanticException("Duplicates declaration for database name");
-      }
-      tableName = getUnescapedName((ASTNode) ast.getChild(2)) + "." + tableName;
-      showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
-      break;
-    case 4: // specifies db and pattern
-      if (tableName.contains(".")) {
-        throw new SemanticException("Duplicates declaration for database name");
-      }
-      tableName = getUnescapedName((ASTNode) ast.getChild(2)) + "." + tableName;
-      pattern = unescapeSQLString(ast.getChild(3).getText());
-      showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName, pattern);
-      break;
-    default:
-      break;
-    }
-
-    Table tab = getTable(tableName);
-    inputs.add(new ReadEntity(tab));
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showColumnsDesc)));
-    setFetchTask(createFetchTask(ShowColumnsDesc.SCHEMA));
-  }
-
   private void analyzeShowTableStatus(ASTNode ast) throws SemanticException {
     ShowTableStatusDesc showTblStatusDesc;
     String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
@@ -1887,118 +1798,6 @@ private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expe
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
   }
 
-  private void analyzeAlterTableRenameCol(TableName tName, ASTNode ast, Map<String, String> partSpec)
-      throws SemanticException {
-    String newComment = null;
-    boolean first = false;
-    String flagCol = null;
-    boolean isCascade = false;
-    //col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
-    String oldColName = ast.getChild(0).getText();
-    String newColName = ast.getChild(1).getText();
-    String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
-    ASTNode constraintChild = null;
-    int childCount = ast.getChildCount();
-    for (int i = 3; i < childCount; i++) {
-      ASTNode child = (ASTNode)ast.getChild(i);
-      switch (child.getToken().getType()) {
-      case HiveParser.StringLiteral:
-        newComment = unescapeSQLString(child.getText());
-        break;
-      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
-        flagCol = unescapeIdentifier(child.getChild(0).getText());
-        break;
-      case HiveParser.KW_FIRST:
-        first = true;
-        break;
-      case HiveParser.TOK_CASCADE:
-        isCascade = true;
-        break;
-      case HiveParser.TOK_RESTRICT:
-        break;
-      default:
-        constraintChild = child;
-      }
-    }
-    List<SQLPrimaryKey> primaryKeys = null;
-    List<SQLForeignKey> foreignKeys = null;
-    List<SQLUniqueConstraint> uniqueConstraints = null;
-    List<SQLNotNullConstraint> notNullConstraints = null;
-    List<SQLDefaultConstraint> defaultConstraints= null;
-    List<SQLCheckConstraint> checkConstraints= null;
-    if (constraintChild != null) {
-      // Process column constraint
-      switch (constraintChild.getToken().getType()) {
-      case HiveParser.TOK_CHECK_CONSTRAINT:
-        checkConstraints = new ArrayList<>();
-        ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(newColName), checkConstraints,
-            (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
-        break;
-      case HiveParser.TOK_DEFAULT_VALUE:
-        defaultConstraints = new ArrayList<>();
-        ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(newColName),
-            defaultConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
-        break;
-      case HiveParser.TOK_NOT_NULL:
-        notNullConstraints = new ArrayList<>();
-        ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(newColName),
-            notNullConstraints);
-        break;
-      case HiveParser.TOK_UNIQUE:
-        uniqueConstraints = new ArrayList<>();
-        ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(newColName),
-            uniqueConstraints);
-        break;
-      case HiveParser.TOK_PRIMARY_KEY:
-        primaryKeys = new ArrayList<>();
-        ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(newColName), primaryKeys);
-        break;
-      case HiveParser.TOK_FOREIGN_KEY:
-        foreignKeys = new ArrayList<>();
-        ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys);
-        break;
-      default:
-        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
-            constraintChild.getToken().getText()));
-      }
-    }
-
-    /* Validate the operation of renaming a column name. */
-    Table tab = getTable(tName);
-
-    if(checkConstraints != null && !checkConstraints.isEmpty()) {
-      validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
-    }
-
-    if(tab.getTableType() == TableType.EXTERNAL_TABLE
-        && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)){
-      throw new SemanticException(
-          ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. "
-              + "Only RELY is allowed."));
-    }
-
-    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
-    if ((null != skewInfo)
-        && (null != skewInfo.getSkewedColNames())
-        && skewInfo.getSkewedColNames().contains(oldColName)) {
-      throw new SemanticException(oldColName
-          + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
-    }
-
-    Constraints constraints = new Constraints(primaryKeys, foreignKeys, notNullConstraints, uniqueConstraints,
-        defaultConstraints, checkConstraints);
-    AlterTableChangeColumnDesc alterTblDesc = new AlterTableChangeColumnDesc(tName, partSpec, isCascade, constraints,
-        unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol);
-    addInputsOutputsAlterTable(tName, partSpec, alterTblDesc, alterTblDesc.getType(), false);
-    if (AcidUtils.isTransactionalTable(tab)) {
-      // Note: we might actually need it only when certain changes (e.g. name or type?) are made.
-      setAcidDdlDesc(alterTblDesc);
-    }
-
-
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
-  }
-
   private void analyzeAlterTableRenamePart(ASTNode ast, TableName tblName,
       Map<String, String> oldPartSpec) throws SemanticException {
     Table tab = getTable(tblName, true);
@@ -2039,44 +1838,6 @@ private void analyzeAlterTableBucketNum(ASTNode ast, TableName tblName, Map<Stri
   }
 
-  private void analyzeAlterTableAddCols(TableName tName, ASTNode ast, Map<String, String> partSpec)
-      throws SemanticException {
-
-    List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(0));
-    boolean isCascade = false;
-    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
-      isCascade = true;
-    }
-
-    AlterTableAddColumnsDesc desc = new AlterTableAddColumnsDesc(tName, partSpec, isCascade, newCols);
-    Table table = getTable(tName, true);
-    if (AcidUtils.isTransactionalTable(table)) {
-      setAcidDdlDesc(desc);
-    }
-
-    addInputsOutputsAlterTable(tName, partSpec, desc, desc.getType(), false);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeAlterTableReplaceCols(TableName tName, ASTNode ast, Map<String, String> partSpec)
-      throws SemanticException {
-
-    List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(0));
-    boolean isCascade = false;
-    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
-      isCascade = true;
-    }
-
-    AlterTableReplaceColumnsDesc alterTblDesc = new AlterTableReplaceColumnsDesc(tName, partSpec, isCascade, newCols);
-    Table table = getTable(tName, true);
-    if (AcidUtils.isTransactionalTable(table)) {
-      setAcidDdlDesc(alterTblDesc);
-    }
-
-    addInputsOutputsAlterTable(tName, partSpec, alterTblDesc, alterTblDesc.getType(), false);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
-  }
-
   private void analyzeAlterTableDropParts(TableName tName, ASTNode ast, boolean expectView)
       throws SemanticException {
 
     boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 90549f9f3a..006a55e5e6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -104,6 +104,7 @@
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.misc.hooks.InsertCommitHookDesc;
+import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils;
 import org.apache.hadoop.hive.ql.ddl.table.create.CreateTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableUnsetPropertiesDesc;
@@ -13524,16 +13525,16 @@ ASTNode analyzeCreateTable(
       throw new SemanticException("Unrecognized command.");
     }
 
-    if(isExt && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)){
+    if (isExt && ConstraintsUtils.hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints,
+        checkConstraints)) {
       throw new SemanticException(
           ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. "
              + "Only RELY is allowed."));
     }
 
-    if(checkConstraints != null && !checkConstraints.isEmpty()) {
-      validateCheckConstraint(cols, checkConstraints, ctx.getConf());
+    if (checkConstraints != null && !checkConstraints.isEmpty()) {
+      ConstraintsUtils.validateCheckConstraint(cols, checkConstraints, ctx.getConf());
     }
-
     storageFormat.fillDefaultStorageFormat(isExt, false);
 
     // check for existence of table
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 4dbb06ef9d..137f721042 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -108,7 +108,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t
     case HiveParser.TOK_DESCTABLE:
     case HiveParser.TOK_MSCK:
     case HiveParser.TOK_SHOWTABLES:
-    case HiveParser.TOK_SHOWCOLUMNS:
     case HiveParser.TOK_SHOW_TABLESTATUS:
     case HiveParser.TOK_SHOW_TBLPROPERTIES:
     case HiveParser.TOK_SHOWPARTITIONS: