diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java new file mode 100644 index 0000000000..3ff04131d0 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import java.io.Serializable; +import java.util.Map; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * Abstract ancestor of all ALTER TABLE descriptors that are handled by the AlterTableWithWriteIdOperations framework. 
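+ *
+ * <p>Illustration only (not part of this change): a concrete descriptor plugs into the framework by registering
+ * its operation class, delegating to this constructor and implementing {@code mayNeedWriteId()}. The class names
+ * below are hypothetical.</p>
+ * <pre>
+ * public class AlterTableExampleDesc extends AbstractAlterTableDesc {
+ *   static {
+ *     DDLTask2.registerOperation(AlterTableExampleDesc.class, AlterTableExampleOperation.class);
+ *   }
+ *
+ *   public AlterTableExampleDesc(String tableName) throws SemanticException {
+ *     super(AlterTableTypes.ADDPROPS, tableName, null, null, false, false);
+ *   }
+ *
+ *   &#64;Override
+ *   public boolean mayNeedWriteId() {
+ *     return true;
+ *   }
+ * }
+ * </pre>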
+ */ +public abstract class AbstractAlterTableDesc implements DDLDesc, DDLDescWithWriteId, Serializable { + private static final long serialVersionUID = 1L; + + private final AlterTableTypes type; + private final String tableName; + private final Map partitionSpec; + private final ReplicationSpec replicationSpec; + private final boolean isCascade; + private final boolean expectView; + + private Map props; + + private Long writeId; + + public AbstractAlterTableDesc(AlterTableTypes type, String tableName, Map partitionSpec, + ReplicationSpec replicationSpec, boolean isCascade, boolean expectView) throws SemanticException { + this.type = type; + this.tableName = String.join(".", Utilities.getDbTableName(tableName)); + this.partitionSpec = partitionSpec; + this.replicationSpec = replicationSpec; + this.isCascade = isCascade; + this.expectView = expectView; + } + + public AlterTableTypes getType() { + return type; + } + + @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getTableName() { + return tableName; + } + + @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public Map getPartitionSpec() { + return partitionSpec; + } + + public ReplicationSpec getReplicationSpec() { + return replicationSpec; + } + + @Explain(displayName = "cascade", displayOnlyOnTrue = true, + explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public boolean isCascade() { + return isCascade; + } + + public boolean expectView() { + return expectView; + } + + public Map getProps() { + return props; + } + + @Override + public String getFullTableName() { + return tableName; + } + + @Override + public void setWriteId(long writeId) { + this.writeId = writeId; + } + + public Long getWriteId() { + return writeId; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java new file mode 100644 index 0000000000..5d8cd94939 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaHook; +import org.apache.hadoop.hive.metastore.api.EnvironmentContext; +import org.apache.hadoop.hive.metastore.api.InvalidOperationException; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintOperation; +import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Operation process of running some alter table command that requires write id. + */ +public abstract class AbstractAlterTableOperation extends DDLOperation { + private final AbstractAlterTableDesc desc; + + public AbstractAlterTableOperation(DDLOperationContext context, AbstractAlterTableDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + if (!AlterTableUtils.allowOperationInReplicationScope(context.getDb(), desc.getTableName(), null, + desc.getReplicationSpec())) { + // no alter, the table is missing either due to drop/rename which follows the alter. + // or the existing table is newer than our update. + LOG.debug("DDLTask: Alter Table is skipped as table {} is newer than update", desc.getTableName()); + return 0; + } + + Table oldTable = context.getDb().getTable(desc.getTableName()); + List partitions = getPartitions(oldTable, desc.getPartitionSpec(), context); + + // Don't change the table object returned by the metastore, as we'll mess with it's caches. + Table table = oldTable.copy(); + + EnvironmentContext environmentContext = initializeEnvironmentContext(null); + + if (partitions == null) { + doAlteration(table, null); + } else { + for (Partition partition : partitions) { + doAlteration(table, partition); + } + } + + finalizeAlterTableWithWriteIdOp(table, oldTable, partitions, context, environmentContext, desc); + return 0; + } + + private List getPartitions(Table tbl, Map partSpec, DDLOperationContext context) + throws HiveException { + List partitions = null; + if (partSpec != null) { + if (DDLSemanticAnalyzer.isFullSpec(tbl, partSpec)) { + partitions = new ArrayList(); + Partition part = context.getDb().getPartition(tbl, partSpec, false); + if (part == null) { + // User provided a fully specified partition spec but it doesn't exist, fail. + throw new HiveException(ErrorMsg.INVALID_PARTITION, + StringUtils.join(partSpec.keySet(), ',') + " for table " + tbl.getTableName()); + + } + partitions.add(part); + } else { + // DDLSemanticAnalyzer has already checked if partial partition specs are allowed, + // thus we should not need to check it here. 
+ partitions = context.getDb().getPartitions(tbl, partSpec); + } + } + + return partitions; + } + + private EnvironmentContext initializeEnvironmentContext(EnvironmentContext environmentContext) { + EnvironmentContext result = environmentContext == null ? new EnvironmentContext() : environmentContext; + // do not need update stats in alter table/partition operations + if (result.getProperties() == null || + result.getProperties().get(StatsSetupConst.DO_NOT_UPDATE_STATS) == null) { + result.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); + } + return result; + } + + protected abstract void doAlteration(Table table, Partition partition) throws HiveException; + + protected StorageDescriptor getStorageDescriptor(Table tbl, Partition part) { + return (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd()); + } + + public void finalizeAlterTableWithWriteIdOp(Table table, Table oldTable, List partitions, + DDLOperationContext context, EnvironmentContext environmentContext, AbstractAlterTableDesc alterTable) + throws HiveException { + if (partitions == null) { + updateModifiedParameters(table.getTTable().getParameters(), context.getConf()); + table.checkValidity(context.getConf()); + } else { + for (Partition partition : partitions) { + updateModifiedParameters(partition.getParameters(), context.getConf()); + } + } + + try { + environmentContext.putToProperties(HiveMetaHook.ALTER_TABLE_OPERATION_TYPE, alterTable.getType().name()); + if (partitions == null) { + long writeId = alterTable.getWriteId() != null ? alterTable.getWriteId() : 0; + if (alterTable.getReplicationSpec() != null && alterTable.getReplicationSpec().isMigratingToTxnTable()) { + Long tmpWriteId = ReplUtils.getMigrationCurrentTblWriteId(context.getConf()); + if (tmpWriteId == null) { + throw new HiveException("DDLTask : Write id is not set in the config by open txn task for migration"); + } + writeId = tmpWriteId; + } + context.getDb().alterTable(alterTable.getTableName(), table, alterTable.isCascade(), environmentContext, true, + writeId); + } else { + // Note: this is necessary for UPDATE_STATISTICS command, that operates via ADDPROPS (why?). + // For any other updates, we don't want to do txn check on partitions when altering table. + boolean isTxn = false; + if (alterTable.getPartitionSpec() != null && alterTable.getType() == AlterTableTypes.ADDPROPS) { + // ADDPROPS is used to add replication properties like repl.last.id, which isn't + // transactional change. In case of replication check for transactional properties + // explicitly. + Map props = alterTable.getProps(); + if (alterTable.getReplicationSpec() != null && alterTable.getReplicationSpec().isInReplicationScope()) { + isTxn = (props.get(StatsSetupConst.COLUMN_STATS_ACCURATE) != null); + } else { + isTxn = true; + } + } + String qualifiedName = TableName.getDbTable(table.getTTable().getDbName(), table.getTTable().getTableName()); + context.getDb().alterPartitions(qualifiedName, partitions, environmentContext, isTxn); + } + // Add constraints if necessary + if (alterTable instanceof AbstractAlterTableWithConstraintsDesc) { + AlterTableAddConstraintOperation.addConstraints((AbstractAlterTableWithConstraintsDesc)alterTable, + context.getDb()); + } + } catch (InvalidOperationException e) { + LOG.error("alter table: ", e); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); + } + + // This is kind of hacky - the read entity contains the old table, whereas the write entity contains the new + // table. 
This is needed for rename - both the old and the new table names are passed + // Don't acquire locks for any of these, we have already asked for them in DDLSemanticAnalyzer. + if (partitions != null) { + for (Partition partition : partitions) { + context.getWork().getInputs().add(new ReadEntity(partition)); + DDLUtils.addIfAbsentByName(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK), context); + } + } else { + context.getWork().getInputs().add(new ReadEntity(oldTable)); + DDLUtils.addIfAbsentByName(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK), context); + } + } + + private static void updateModifiedParameters(Map params, HiveConf conf) throws HiveException { + String user = SessionState.getUserFromAuthenticator(); + params.put("last_modified_by", user); + params.put("last_modified_time", Long.toString(System.currentTimeMillis() / 1000)); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java new file mode 100644 index 0000000000..ebd3bdbf4d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import java.util.Map; + +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; + +/** + * Abstract ancestor of all ALTER TABLE descriptors that are handled by the AlterTableWithWriteIdOperations framework + * and also has constraint changes. 
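+ *
+ * <p>Subclasses hand the new {@code Constraints} bundle to this constructor; once the metastore alteration has
+ * succeeded, {@code AbstractAlterTableOperation} applies it via
+ * {@code AlterTableAddConstraintOperation.addConstraints}. Illustrative super call (mirroring
+ * {@code AlterTableChangeColumnDesc}):</p>
+ * <pre>
+ * super(AlterTableTypes.RENAME_COLUMN, tableName, partitionSpec, null, isCascade, false, constraints);
+ * </pre>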
+ */ +public abstract class AbstractAlterTableWithConstraintsDesc extends AbstractAlterTableDesc { + private static final long serialVersionUID = 1L; + + private final Constraints constraints; + + public AbstractAlterTableWithConstraintsDesc(AlterTableTypes type, String tableName, + Map partitionSpec, ReplicationSpec replicationSpec, boolean isCascade, boolean expectView, + Constraints constraints) throws SemanticException { + super(type, tableName, partitionSpec, replicationSpec, isCascade, expectView); + this.constraints = constraints; + } + + public Constraints getConstraints() { + return constraints; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java index 3c6d7eada9..4e76536a81 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java @@ -20,6 +20,10 @@ import java.util.Map; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -61,4 +65,9 @@ public static boolean allowOperationInReplicationScope(Hive db, String tableName // Or the existing table is newer than our update. So, don't allow the update. return false; } + + public static boolean isSchemaEvolutionEnabled(Table table, Configuration conf) { + return AcidUtils.isTablePropertyTransactional(table.getMetadata()) || + HiveConf.getBoolVar(conf, ConfVars.HIVE_SCHEMA_EVOLUTION); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java new file mode 100644 index 0000000000..a15597c071 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for ALTER TABLE ... 
ADD COLUMNS ... commands. + */ +@Explain(displayName = "Add Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class AlterTableAddColumnsDesc extends AbstractAlterTableDesc { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(AlterTableAddColumnsDesc.class, AlterTableAddColumnsOperation.class); + } + + private final List newColumns; + + public AlterTableAddColumnsDesc(String tableName, Map partitionSpec, boolean isCascade, + List newColumns) throws SemanticException { + super(AlterTableTypes.ADD_COLUMNS, tableName, partitionSpec, null, isCascade, false); + this.newColumns = newColumns; + } + + public List getNewColumns() { + return newColumns; + } + + // Only for explain + @Explain(displayName = "new columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public List getNewColsString() { + return Utilities.getFieldSchemaString(newColumns); + } + + @Override + public boolean mayNeedWriteId() { + return true; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java new file mode 100644 index 0000000000..9a49d10f0c --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.util.List; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; +import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; + +/** + * Operation process of adding some new columns. 
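+ * The new columns are appended to the current column list of the table (or of the selected partitions),
+ * and duplicate column names are rejected. Example statement handled here (illustrative syntax):
+ * <pre>
+ * ALTER TABLE t ADD COLUMNS (new_col INT COMMENT 'example') CASCADE;
+ * </pre>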
+ */ +public class AlterTableAddColumnsOperation extends AbstractAlterTableOperation { + private final AlterTableAddColumnsDesc desc; + + public AlterTableAddColumnsOperation(DDLOperationContext context, AlterTableAddColumnsDesc desc) { + super(context, desc); + this.desc = desc; + } + + @Override + protected void doAlteration(Table table, Partition partition) throws HiveException { + StorageDescriptor sd = getStorageDescriptor(table, partition); + String serializationLib = sd.getSerdeInfo().getSerializationLib(); + AvroSerdeUtils.handleAlterTableForAvro(context.getConf(), serializationLib, table.getTTable().getParameters()); + + List oldColumns = (partition == null ? table.getColsForMetastore() : partition.getColsForMetastore()); + List newColumns = desc.getNewColumns(); + + if ("org.apache.hadoop.hive.serde.thrift.columnsetSerDe".equals(serializationLib)) { + context.getConsole().printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe"); + sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName()); + sd.setCols(newColumns); + } else { + // make sure the columns does not already exist + for (FieldSchema newColumn : newColumns) { + for (FieldSchema oldColumn : oldColumns) { + if (oldColumn.getName().equalsIgnoreCase(newColumn.getName())) { + throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newColumn.getName()); + } + } + + oldColumns.add(newColumn); + } + sd.setCols(oldColumns); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java new file mode 100644 index 0000000000..827cc80d14 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.util.Map; + +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for ALTER TABLE ... CHANGE COLUMN ... commands. 
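+ * Example statement described by this node (illustrative syntax):
+ * <pre>
+ * ALTER TABLE t CHANGE COLUMN old_name new_name STRING COMMENT 'renamed' AFTER other_col CASCADE;
+ * </pre>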
+ */ +@Explain(displayName = "Change Column", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class AlterTableChangeColumnDesc extends AbstractAlterTableWithConstraintsDesc implements DDLDescWithWriteId { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(AlterTableChangeColumnDesc.class, AlterTableChangeColumnOperation.class); + } + + private final String oldColumnName; + private final String newColumnName; + private final String newColumnType; + private final String newColumnComment; + private final boolean first; + private final String afterColumn; + + public AlterTableChangeColumnDesc(String tableName, Map partitionSpec, boolean isCascade, + Constraints constraints, String oldColumnName, String newColumnName, String newColumnType, + String newColumnComment, boolean first, String afterColumn) throws SemanticException { + super(AlterTableTypes.RENAME_COLUMN, tableName, partitionSpec, null, isCascade, false, constraints); + + this.oldColumnName = oldColumnName; + this.newColumnName = newColumnName; + this.newColumnType = newColumnType; + this.newColumnComment = newColumnComment; + this.first = first; + this.afterColumn = afterColumn; + } + + @Explain(displayName = "old column name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getOldColumnName() { + return oldColumnName; + } + + @Explain(displayName = "new column name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getNewColumnName() { + return newColumnName; + } + + @Explain(displayName = "new column type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getNewColumnType() { + return newColumnType; + } + + @Explain(displayName = "new column comment", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getNewColumnComment() { + return newColumnComment; + } + + @Explain(displayName = "first", displayOnlyOnTrue = true, + explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public boolean isFirst() { + return first; + } + + @Explain(displayName = "after column", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getAfterColumn() { + return afterColumn; + } + + @Override + public boolean mayNeedWriteId() { + return true; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java new file mode 100644 index 0000000000..31311ede4e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableUtils; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; + +/** + * Operation process changing a column. + */ +public class AlterTableChangeColumnOperation extends AbstractAlterTableOperation { + private final AlterTableChangeColumnDesc desc; + + public AlterTableChangeColumnOperation(DDLOperationContext context, AlterTableChangeColumnDesc desc) { + super(context, desc); + this.desc = desc; + } + + @Override + protected void doAlteration(Table table, Partition partition) throws HiveException { + StorageDescriptor sd = getStorageDescriptor(table, partition); + String serializationLib = sd.getSerdeInfo().getSerializationLib(); + AvroSerdeUtils.handleAlterTableForAvro(context.getConf(), serializationLib, table.getTTable().getParameters()); + + // if orc table, restrict reordering columns as it will break schema evolution + boolean isOrcSchemaEvolution = sd.getInputFormat().equals(OrcInputFormat.class.getName()) && + AlterTableUtils.isSchemaEvolutionEnabled(table, context.getConf()); + if (isOrcSchemaEvolution && (desc.isFirst() || StringUtils.isNotBlank(desc.getAfterColumn()))) { + throw new HiveException(ErrorMsg.CANNOT_REORDER_COLUMNS, desc.getTableName()); + } + + FieldSchema column = null; + boolean found = false; + int position = desc.isFirst() ? 0 : -1; + int i = 1; + + List oldColumns = (partition == null ? 
table.getColsForMetastore() : partition.getColsForMetastore()); + List newColumns = new ArrayList(); + for (FieldSchema oldColumn : oldColumns) { + String oldColumnName = oldColumn.getName(); + if (oldColumnName.equalsIgnoreCase(desc.getOldColumnName())) { + oldColumn.setName(desc.getNewColumnName()); + if (StringUtils.isNotBlank(desc.getNewColumnType())) { + oldColumn.setType(desc.getNewColumnType()); + } + if (desc.getNewColumnComment() != null) { + oldColumn.setComment(desc.getNewColumnComment()); + } + found = true; + if (desc.isFirst() || StringUtils.isNotBlank(desc.getAfterColumn())) { + column = oldColumn; + continue; + } + } else if (oldColumnName.equalsIgnoreCase(desc.getNewColumnName())) { + throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, desc.getNewColumnName()); + } + + if (oldColumnName.equalsIgnoreCase(desc.getAfterColumn())) { + position = i; + } + + i++; + newColumns.add(oldColumn); + } + + if (!found) { + throw new HiveException(ErrorMsg.INVALID_COLUMN, desc.getOldColumnName()); + } + if (StringUtils.isNotBlank(desc.getAfterColumn()) && position < 0) { + throw new HiveException(ErrorMsg.INVALID_COLUMN, desc.getAfterColumn()); + } + + if (position >= 0) { + newColumns.add(position, column); + } + + sd.setCols(newColumns); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java new file mode 100644 index 0000000000..6947c1ed51 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for ALTER TABLE ... REPLACE COLUMNS ... commands. 
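+ * Example statement described by this node (illustrative syntax):
+ * <pre>
+ * ALTER TABLE t REPLACE COLUMNS (a INT, b STRING) CASCADE;
+ * </pre>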
+ */
+@Explain(displayName = "Replace Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterTableReplaceColumnsDesc extends AbstractAlterTableDesc implements DDLDescWithWriteId {
+  private static final long serialVersionUID = 1L;
+
+  static {
+    DDLTask2.registerOperation(AlterTableReplaceColumnsDesc.class, AlterTableReplaceColumnsOperation.class);
+  }
+
+  private final List<FieldSchema> newColumns;
+
+  public AlterTableReplaceColumnsDesc(String tableName, Map<String, String> partitionSpec, boolean isCascade,
+      List<FieldSchema> newColumns) throws SemanticException {
+    super(AlterTableTypes.REPLACE_COLUMNS, tableName, partitionSpec, null, isCascade, false);
+    this.newColumns = newColumns;
+  }
+
+  public List<FieldSchema> getNewColumns() {
+    return newColumns;
+  }
+
+  // Only for explain
+  @Explain(displayName = "new columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public List<String> getNewColsString() {
+    return Utilities.getFieldSchemaString(newColumns);
+  }
+
+  @Override
+  public boolean mayNeedWriteId() {
+    return true;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
new file mode 100644
index 0000000000..116fa2d28c
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column;
+
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableUtils;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation;
+import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+
+import com.google.common.collect.ImmutableSet;
+
+/**
+ * Operation process of replacing the columns of a table (or partition) with a new column list.
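+ * Only a fixed set of native SerDes is eligible; when schema evolution is enabled, replacements that would drop
+ * columns are rejected for ORC tables, as well as for partitioned Parquet tables altered without CASCADE.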
+ */ +public class AlterTableReplaceColumnsOperation extends AbstractAlterTableOperation { + private final AlterTableReplaceColumnsDesc desc; + + public AlterTableReplaceColumnsOperation(DDLOperationContext context, AlterTableReplaceColumnsDesc desc) { + super(context, desc); + this.desc = desc; + } + + private static final Set VALID_SERIALIZATION_LIBS = ImmutableSet.of( + MetadataTypedColumnsetSerDe.class.getName(), LazySimpleSerDe.class.getName(), ColumnarSerDe.class.getName(), + DynamicSerDe.class.getName(), ParquetHiveSerDe.class.getName(), OrcSerde.class.getName()); + + @Override + protected void doAlteration(Table table, Partition partition) throws HiveException { + StorageDescriptor sd = getStorageDescriptor(table, partition); + // change SerDe to LazySimpleSerDe if it is columnsetSerDe + String serializationLib = sd.getSerdeInfo().getSerializationLib(); + if ("org.apache.hadoop.hive.serde.thrift.columnsetSerDe".equals(serializationLib)) { + context.getConsole().printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe"); + sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName()); + } else if (!VALID_SERIALIZATION_LIBS.contains(serializationLib)) { + throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, desc.getTableName()); + } + + // adding columns and limited integer type promotion is not supported for ORC schema evolution + boolean isOrcSchemaEvolution = serializationLib.equals(OrcSerde.class.getName()) && + AlterTableUtils.isSchemaEvolutionEnabled(table, context.getConf()); + if (isOrcSchemaEvolution) { + List existingCols = sd.getCols(); + List replaceCols = desc.getNewColumns(); + + if (replaceCols.size() < existingCols.size()) { + throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, desc.getTableName()); + } + } + + boolean droppingColumns = desc.getNewColumns().size() < sd.getCols().size(); + if (ParquetHiveSerDe.isParquetTable(table) && AlterTableUtils.isSchemaEvolutionEnabled(table, context.getConf()) && + !desc.isCascade() && droppingColumns && table.isPartitioned()) { + LOG.warn("Cannot drop columns from a partitioned parquet table without the CASCADE option"); + throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, desc.getTableName()); + } + + sd.setCols(desc.getNewColumns()); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java new file mode 100644 index 0000000000..f1a1ea12e5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
+import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER TABLE ... UPDATE COLUMNS ... commands.
+ */
+@Explain(displayName = "Update Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterTableUpdateColumnsDesc extends AbstractAlterTableDesc implements DDLDescWithWriteId {
+  private static final long serialVersionUID = 1L;
+
+  static {
+    DDLTask2.registerOperation(AlterTableUpdateColumnsDesc.class, AlterTableUpdateColumnsOperation.class);
+  }
+
+  public AlterTableUpdateColumnsDesc(String tableName, Map<String, String> partitionSpec, boolean isCascade)
+      throws SemanticException {
+    super(AlterTableTypes.UPDATE_COLUMNS, tableName, partitionSpec, null, isCascade, false);
+  }
+
+  @Override
+  public boolean mayNeedWriteId() {
+    return true;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
new file mode 100644
index 0000000000..f78e61b13c
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.column;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.serde2.Deserializer;
+
+/**
+ * Operation process of updating the columns of a table to the current schema reported by its deserializer.
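+ * The refreshed column list is obtained through {@code HiveMetaStoreUtils.getFieldsFromDeserializer}; SerDes whose
+ * schema is already tracked by the metastore ({@code SERDES_USING_METASTORE_FOR_SCHEMA}) are rejected.
+ * Example statement handled here (illustrative syntax):
+ * <pre>
+ * ALTER TABLE t UPDATE COLUMNS;
+ * </pre>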
+ */ +public class AlterTableUpdateColumnsOperation extends AbstractAlterTableOperation { + public AlterTableUpdateColumnsOperation(DDLOperationContext context, AlterTableUpdateColumnsDesc desc) { + super(context, desc); + } + + @Override + protected void doAlteration(Table table, Partition partition) throws HiveException { + //StorageDescriptor sd = getStorageDescriptor(table, partition); + String serializationLib = table.getSd().getSerdeInfo().getSerializationLib(); + + Collection serdes = MetastoreConf.getStringCollection(context.getConf(), + MetastoreConf.ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA); + if (serdes.contains(serializationLib)) { + throw new HiveException(table.getTableName() + " has serde " + serializationLib + " for which schema " + + "is already handled by HMS."); + } + + Deserializer deserializer = table.getDeserializer(true); + try { + LOG.info("Updating metastore columns for table: {}", table.getTableName()); + List fields = HiveMetaStoreUtils.getFieldsFromDeserializer(table.getTableName(), deserializer); + StorageDescriptor sd = getStorageDescriptor(table, partition); + sd.setCols(fields); + } catch (org.apache.hadoop.hive.serde2.SerDeException | MetaException e) { + LOG.error("alter table update columns: {}", e); + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java similarity index 51% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java index 7047f56275..45008edda4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java @@ -15,109 +15,55 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.table.column; import java.io.Serializable; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; -public class ShowColumnsDesc extends DDLDesc implements Serializable { +/** + * DDL task description for SHOW COLUMNS commands. + */ +@Explain(displayName = "Show Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowColumnsDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - String pattern; - String tableName; - String resFile; - /** - * table name for the result of show columns. - */ - private static final String table = "show_columns"; - /** - * thrift ddl for the result of show columns. 
- */ - private static final String schema = "Field#string"; - public String getTable() { - return table; + static { + DDLTask2.registerOperation(ShowColumnsDesc.class, ShowColumnsOperation.class); } - public String getSchema() { - return schema; - } + public static final String SCHEMA = "Field#string"; - public ShowColumnsDesc() { - } + private final String resFile; + private final String tableName; + private final String pattern; - /** - * @param resFile - */ - public ShowColumnsDesc(Path resFile) { - this.resFile = resFile.toString(); - tableName = null; - } - - /** - * @param tableName name of table to show columns of - */ public ShowColumnsDesc(Path resFile, String tableName) { - this.resFile = resFile.toString(); - this.tableName = tableName; + this(resFile, tableName, null); } - /** - * @param tableName name of table to show columns of - */ public ShowColumnsDesc(Path resFile, String tableName, String pattern) { this.resFile = resFile.toString(); this.pattern = pattern; this.tableName = tableName; } - - /** - * @return the pattern - */ - @Explain(displayName = "pattern") + @Explain(displayName = "pattern", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getPattern() { return pattern; } - /** - * @param pattern - * the pattern to set - */ - public void setPattern(String pattern) { - this.pattern = pattern; - } - - /** - * @return the tableName - */ @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getTableName() { return tableName; } - /** - * @param tableName - * the tableName to set - */ - public void setTableName(String tableName) { - this.tableName = tableName; - } - - /** - * @return the resFile - */ @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) public String getResFile() { return resFile; } - - /** - * @param resFile - * the resFile to set - */ - public void setResFile(String resFile) { - this.resFile = resFile; - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java new file mode 100644 index 0000000000..068863df8b --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.column; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; +import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Operation process of showing the columns. + */ +public class ShowColumnsOperation extends DDLOperation { + private final ShowColumnsDesc desc; + + public ShowColumnsOperation(DDLOperationContext context, ShowColumnsDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + // write the results in the file + try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) { + List columns = getColumnsByPattern(); + writeColumns(outStream, columns); + } catch (IOException e) { + throw new HiveException(e, ErrorMsg.GENERIC_ERROR); + } + + return 0; + } + + private List getColumnsByPattern() throws HiveException { + List columns = getCols(); + Matcher matcher = getMatcher(); + return filterColumns(columns, matcher); + } + + private List getCols() throws HiveException { + Table table = context.getDb().getTable(desc.getTableName()); + List allColumns = new ArrayList<>(); + allColumns.addAll(table.getCols()); + allColumns.addAll(table.getPartCols()); + return allColumns; + } + + private Matcher getMatcher() { + String columnPattern = desc.getPattern(); + if (columnPattern == null) { + columnPattern = "*"; + } + columnPattern = columnPattern.toLowerCase(); + columnPattern = columnPattern.replaceAll("\\*", ".*"); + + Pattern pattern = Pattern.compile(columnPattern); + return pattern.matcher(""); + } + + private List filterColumns(List columns, Matcher matcher) { + ArrayList result = new ArrayList<>(); + for (FieldSchema column : columns) { + matcher.reset(column.getName()); + if (matcher.matches()) { + result.add(column); + } + } + + result.sort( + new Comparator() { + @Override + public int compare(FieldSchema f1, FieldSchema f2) { + return f1.getName().compareTo(f2.getName()); + } + }); + + return result; + } + + private void writeColumns(DataOutputStream outStream, List columns) throws IOException { + TextMetaDataTable tmd = new TextMetaDataTable(); + for (FieldSchema fieldSchema : columns) { + tmd.addRow(MetaDataFormatUtils.extractColumnValues(fieldSchema)); + } + + // In case the query is served by HiveServer2, don't pad it with spaces, + // as HiveServer2 output is consumed by JDBC/ODBC clients. 
+ boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); + outStream.writeBytes(tmd.renderTable(isOutputPadded)); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java new file mode 100644 index 0000000000..447d61b794 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Table column related DDL operation descriptions and operations. */ +package org.apache.hadoop.hive.ql.ddl.table.column; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java new file mode 100644 index 0000000000..cd8deab596 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.ddl.table.constaint; + +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for ALTER TABLE ... ADD CONSTRAINT ... commands. 
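+ * Example statement described by this node (illustrative syntax):
+ * <pre>
+ * ALTER TABLE t ADD CONSTRAINT pk_t PRIMARY KEY (id) DISABLE NOVALIDATE;
+ * </pre>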
+ */ +@Explain(displayName = "Add Constraint", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class AlterTableAddConstraintDesc extends AbstractAlterTableWithConstraintsDesc { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(AlterTableAddConstraintDesc.class, AlterTableAddConstraintOperation.class); + } + + public AlterTableAddConstraintDesc(String tableName, ReplicationSpec replicationSpec, Constraints constraints) + throws SemanticException { + super(AlterTableTypes.ADD_CONSTRAINT, tableName, null, replicationSpec, false, false, constraints); + } + + @Override + public boolean mayNeedWriteId() { + return false; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java new file mode 100644 index 0000000000..f6861a5e72 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.constaint; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hadoop.hive.metastore.api.InvalidObjectException; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of adding a new constraint. + */ +public class AlterTableAddConstraintOperation extends DDLOperation { + private final AlterTableAddConstraintDesc desc; + + public AlterTableAddConstraintOperation(DDLOperationContext context, AlterTableAddConstraintDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws Exception { + if (!DDLUtils.allowOperationInReplicationScope(context.getDb(), desc.getTableName(), null, + desc.getReplicationSpec())) { + // no alter, the table is missing either due to drop/rename which follows the alter. + // or the existing table is newer than our update. 
+ LOG.debug("DDLTask: Alter Table is skipped as table {} is newer than update", desc.getTableName()); + return 0; + } + + addConstraints(desc, context.getDb()); + return 0; + } + + // This function is used by other operations that may modify the constraints + public static void addConstraints(AbstractAlterTableWithConstraintsDesc desc, Hive db) throws HiveException { + try { + Constraints constraints = desc.getConstraints(); + // This is either an alter table add foreign key or add primary key command. + if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys())) { + db.addPrimaryKey(constraints.getPrimaryKeys()); + } + if (CollectionUtils.isNotEmpty(constraints.getForeignKeys())) { + try { + db.addForeignKey(constraints.getForeignKeys()); + } catch (HiveException e) { + if (e.getCause() instanceof InvalidObjectException && desc.getReplicationSpec() != null && + desc.getReplicationSpec().isInReplicationScope()) { + // During repl load, NoSuchObjectException in foreign key shall + // ignore as the foreign table may not be part of the replication + LOG.debug("InvalidObjectException: ", e); + } else { + throw e; + } + } + } + if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints())) { + db.addUniqueConstraint(constraints.getUniqueConstraints()); + } + if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints())) { + db.addNotNullConstraint(constraints.getNotNullConstraints()); + } + if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints())) { + db.addDefaultConstraint(constraints.getDefaultConstraints()); + } + if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints())) { + db.addCheckConstraint(constraints.getCheckConstraints()); + } + } catch (NoSuchObjectException e) { + throw new HiveException(e); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java new file mode 100644 index 0000000000..c5119c5308 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.ddl.table.constaint; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.parse.ReplicationSpec; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for ALTER TABLE ... DROP CONSTRAINT ... commands. 
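+ *
+ * <p>Minimal usage sketch (the table and constraint names below are illustrative placeholders only):</p>
+ * <pre>{@code
+ *   AlterTableDropConstraintDesc desc = new AlterTableDropConstraintDesc("default.tbl", null, "pk_tbl");
+ * }</pre>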
+ */ +@Explain(displayName = "Drop Constraint", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class AlterTableDropConstraintDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(AlterTableDropConstraintDesc.class, AlterTableDropConstraintOperation.class); + } + + private final String tableName; + private final ReplicationSpec replicationSpec; + private final String constraintName; + + public AlterTableDropConstraintDesc(String tableName, ReplicationSpec replicationSpec, String constraintName) + throws SemanticException { + this.tableName = String.join(".", Utilities.getDbTableName(tableName)); + this.replicationSpec = replicationSpec; + this.constraintName = constraintName; + } + + @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getTableName() { + return tableName; + } + + public ReplicationSpec getReplicationSpec() { + return replicationSpec; + } + + @Explain(displayName = "constraint name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getConstraintName() { + return constraintName; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java new file mode 100644 index 0000000000..84c750ee5a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.constaint; + +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of dropping a new constraint. + */ +public class AlterTableDropConstraintOperation extends DDLOperation { + private final AlterTableDropConstraintDesc desc; + + public AlterTableDropConstraintOperation(DDLOperationContext context, AlterTableDropConstraintDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws Exception { + if (!DDLUtils.allowOperationInReplicationScope(context.getDb(), desc.getTableName(), null, + desc.getReplicationSpec())) { + // no alter, the table is missing either due to drop/rename which follows the alter. + // or the existing table is newer than our update. 
+ LOG.debug("DDLTask: Alter Table is skipped as table {} is newer than update", desc.getTableName()); + return 0; + } + + try { + context.getDb().dropConstraint(Utilities.getDatabaseName(desc.getTableName()), + Utilities.getTableName(desc.getTableName()), desc.getConstraintName()); + } catch (NoSuchObjectException e) { + throw new HiveException(e); + } + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java new file mode 100644 index 0000000000..d49ed146cd --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.constaint; + +import java.io.Serializable; +import java.util.List; + +import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; +import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; +import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; + +/** + * Lists of all constraint types. 
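+ *
+ * <p>Unused constraint kinds are simply passed as {@code null}. For example (illustrative; {@code pks}
+ * is a placeholder {@code List<SQLPrimaryKey>}), a primary-key-only holder is built as:</p>
+ * <pre>{@code
+ *   Constraints constraints = new Constraints(pks, null, null, null, null, null);
+ * }</pre>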
+ */
+public class Constraints implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final List<SQLPrimaryKey> primaryKeys;
+  private final List<SQLForeignKey> foreignKeys;
+  private final List<SQLNotNullConstraint> notNullConstraints;
+  private final List<SQLUniqueConstraint> uniqueConstraints;
+  private final List<SQLDefaultConstraint> defaultConstraints;
+  private final List<SQLCheckConstraint> checkConstraints;
+
+  public Constraints(List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+      List<SQLNotNullConstraint> notNullConstraints, List<SQLUniqueConstraint> uniqueConstraints,
+      List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) {
+    this.primaryKeys = primaryKeys;
+    this.foreignKeys = foreignKeys;
+    this.notNullConstraints = notNullConstraints;
+    this.uniqueConstraints = uniqueConstraints;
+    this.defaultConstraints = defaultConstraints;
+    this.checkConstraints = checkConstraints;
+  }
+
+  public List<SQLPrimaryKey> getPrimaryKeys() {
+    return primaryKeys;
+  }
+
+  public List<SQLForeignKey> getForeignKeys() {
+    return foreignKeys;
+  }
+
+  public List<SQLNotNullConstraint> getNotNullConstraints() {
+    return notNullConstraints;
+  }
+
+  public List<SQLUniqueConstraint> getUniqueConstraints() {
+    return uniqueConstraints;
+  }
+
+  public List<SQLDefaultConstraint> getDefaultConstraints() {
+    return defaultConstraints;
+  }
+
+  public List<SQLCheckConstraint> getCheckConstraints() {
+    return checkConstraints;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java
new file mode 100644
index 0000000000..fc662b3737
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Table constraint related DDL operation descriptions and operations. */
+package org.apache.hadoop.hive.ql.ddl.table.constaint;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
index cdd1777767..17d97bad5b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
 /**
  * DDL task description for DESC table_name commands.
*/ diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 0c531bed51..2e955aef9d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -24,16 +24,11 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; -import java.util.Comparator; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; @@ -53,17 +48,13 @@ import org.apache.hadoop.hive.metastore.api.CompactionResponse; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.txn.TxnStore; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.DriverContext; @@ -80,14 +71,11 @@ import org.apache.hadoop.hive.ql.io.merge.MergeFileWork; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcSerde; -import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.PartitionIterable; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; -import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable; import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState; @@ -109,19 +97,12 @@ import org.apache.hadoop.hive.ql.plan.OrcFileMergeDesc; import org.apache.hadoop.hive.ql.plan.RCFileMergeDesc; import org.apache.hadoop.hive.ql.plan.ReplRemoveFirstIncLoadPendFlagDesc; -import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc; import org.apache.hadoop.hive.ql.plan.ShowConfDesc; import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; -import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils; -import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; -import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe; -import 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; -import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.tools.HadoopArchives; import org.apache.hadoop.util.ToolRunner; @@ -190,13 +171,7 @@ public int execute(DriverContext driverContext) { LOG.debug("DDLTask: Alter Table is skipped as table {} is newer than update", alterTbl.getOldName()); return 0; } - if (alterTbl.getOp() == AlterTableTypes.DROPCONSTRAINT ) { - return dropConstraint(db, alterTbl); - } else if (alterTbl.getOp() == AlterTableTypes.ADDCONSTRAINT) { - return addConstraints(db, alterTbl); - } else { - return alterTable(db, alterTbl); - } + return alterTable(db, alterTbl); } AlterTableSimpleDesc simpleDesc = work.getAlterTblSimpleDesc(); @@ -217,11 +192,6 @@ public int execute(DriverContext driverContext) { return msck(db, msckDesc); } - ShowColumnsDesc showCols = work.getShowColumnsDesc(); - if (showCols != null) { - return showColumns(db, showCols); - } - ShowConfDesc showConf = work.getShowConfDesc(); if (showConf != null) { return showConf(db, showConf); @@ -313,16 +283,6 @@ private int showConf(Hive db, ShowConfDesc showConf) throws Exception { return 0; } - private DataOutputStream getOutputStream(String resFile) throws HiveException { - try { - return getOutputStream(new Path(resFile)); - } catch (HiveException e) { - throw e; - } catch (Exception e) { - throw new HiveException(e); - } - } - private DataOutputStream getOutputStream(Path outputFile) throws HiveException { try { FileSystem fs = outputFile.getFileSystem(conf); @@ -1127,80 +1087,6 @@ private int msck(Hive db, MsckDesc msckDesc) { } } - /** - * Write a list of the columns in the table to a file. - * - * @param db - * The database in context. - * @param showCols - * A ShowColumnsDesc for columns we're interested in. - * @return Returns 0 when execution succeeds. - * @throws HiveException - * Throws this exception if an unexpected error occurs. - */ - public int showColumns(Hive db, ShowColumnsDesc showCols) - throws HiveException { - - Table table = db.getTable(showCols.getTableName()); - - // write the results in the file - DataOutputStream outStream = getOutputStream(showCols.getResFile()); - try { - List allCols = table.getCols(); - allCols.addAll(table.getPartCols()); - List cols = getColumnsByPattern(allCols,showCols.getPattern()); - // In case the query is served by HiveServer2, don't pad it with spaces, - // as HiveServer2 output is consumed by JDBC/ODBC clients. - boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); - TextMetaDataTable tmd = new TextMetaDataTable(); - for (FieldSchema fieldSchema : cols) { - tmd.addRow(MetaDataFormatUtils.extractColumnValues(fieldSchema)); - } - outStream.writeBytes(tmd.renderTable(isOutputPadded)); - } catch (IOException e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR); - } finally { - IOUtils.closeStream(outStream); - } - return 0; - } - - /** - * Returns a sorted list of columns matching a column pattern. - * - * @param cols - * Columns of a table. - * @param columnPattern - * we want to find columns similar to a column pattern. - * @return sorted list of columns. 
- */ - private List getColumnsByPattern(List cols, String columnPattern) { - - if(columnPattern == null) { - columnPattern = "*"; - } - columnPattern = columnPattern.toLowerCase(); - columnPattern = columnPattern.replaceAll("\\*", ".*"); - Pattern pattern = Pattern.compile(columnPattern); - Matcher matcher = pattern.matcher(""); - - SortedSet sortedCol = new TreeSet<>( new Comparator() { - @Override - public int compare(FieldSchema f1, FieldSchema f2) { - return f1.getName().compareTo(f2.getName()); - } - }); - - for(FieldSchema column : cols) { - matcher.reset(column.getName()); - if(matcher.matches()) { - sortedCol.add(column); - } - } - - return new ArrayList(sortedCol); - } - /** * Alter a given table. * @@ -1303,8 +1189,6 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException { } db.alterPartitions(Warehouse.getQualifiedName(tbl.getTTable()), allPartitions, environmentContext, isTxn); } - // Add constraints if necessary - addConstraints(db, alterTbl); } catch (InvalidOperationException e) { LOG.error("alter table: ", e); throw new HiveException(e, ErrorMsg.GENERIC_ERROR); @@ -1398,152 +1282,6 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName())); tbl.setTableName(Utilities.getTableName(alterTbl.getNewName())); - } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) { - StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); - String serializationLib = sd.getSerdeInfo().getSerializationLib(); - AvroSerdeUtils.handleAlterTableForAvro(conf, serializationLib, tbl.getTTable().getParameters()); - List oldCols = (part == null - ? tbl.getColsForMetastore() : part.getColsForMetastore()); - List newCols = alterTbl.getNewCols(); - if (serializationLib.equals( - "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { - console - .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe"); - sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName()); - sd.setCols(newCols); - } else { - // make sure the columns does not already exist - Iterator iterNewCols = newCols.iterator(); - while (iterNewCols.hasNext()) { - FieldSchema newCol = iterNewCols.next(); - String newColName = newCol.getName(); - Iterator iterOldCols = oldCols.iterator(); - while (iterOldCols.hasNext()) { - String oldColName = iterOldCols.next().getName(); - if (oldColName.equalsIgnoreCase(newColName)) { - throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newColName); - } - } - oldCols.add(newCol); - } - sd.setCols(oldCols); - } - } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAMECOLUMN) { - StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); - String serializationLib = sd.getSerdeInfo().getSerializationLib(); - AvroSerdeUtils.handleAlterTableForAvro(conf, serializationLib, tbl.getTTable().getParameters()); - List oldCols = (part == null - ? 
tbl.getColsForMetastore() : part.getColsForMetastore()); - List newCols = new ArrayList(); - Iterator iterOldCols = oldCols.iterator(); - String oldName = alterTbl.getOldColName(); - String newName = alterTbl.getNewColName(); - String type = alterTbl.getNewColType(); - String comment = alterTbl.getNewColComment(); - boolean first = alterTbl.getFirst(); - String afterCol = alterTbl.getAfterCol(); - // if orc table, restrict reordering columns as it will break schema evolution - boolean isOrcSchemaEvolution = - sd.getInputFormat().equals(OrcInputFormat.class.getName()) && - isSchemaEvolutionEnabled(tbl); - if (isOrcSchemaEvolution && (first || (afterCol != null && !afterCol.trim().isEmpty()))) { - throw new HiveException(ErrorMsg.CANNOT_REORDER_COLUMNS, alterTbl.getOldName()); - } - FieldSchema column = null; - - boolean found = false; - int position = -1; - if (first) { - position = 0; - } - - int i = 1; - while (iterOldCols.hasNext()) { - FieldSchema col = iterOldCols.next(); - String oldColName = col.getName(); - if (oldColName.equalsIgnoreCase(newName) - && !oldColName.equalsIgnoreCase(oldName)) { - throw new HiveException(ErrorMsg.DUPLICATE_COLUMN_NAMES, newName); - } else if (oldColName.equalsIgnoreCase(oldName)) { - col.setName(newName); - if (type != null && !type.trim().equals("")) { - col.setType(type); - } - if (comment != null) { - col.setComment(comment); - } - found = true; - if (first || (afterCol != null && !afterCol.trim().equals(""))) { - column = col; - continue; - } - } - - if (afterCol != null && !afterCol.trim().equals("") - && oldColName.equalsIgnoreCase(afterCol)) { - position = i; - } - - i++; - newCols.add(col); - } - - // did not find the column - if (!found) { - throw new HiveException(ErrorMsg.INVALID_COLUMN, oldName); - } - // after column is not null, but we did not find it. 
- if ((afterCol != null && !afterCol.trim().equals("")) && position < 0) { - throw new HiveException(ErrorMsg.INVALID_COLUMN, afterCol); - } - - if (position >= 0) { - newCols.add(position, column); - } - - sd.setCols(newCols); - } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.REPLACECOLS) { - StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); - // change SerDe to LazySimpleSerDe if it is columnsetSerDe - String serializationLib = sd.getSerdeInfo().getSerializationLib(); - if (serializationLib.equals( - "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { - console - .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe"); - sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName()); - } else if (!serializationLib.equals( - MetadataTypedColumnsetSerDe.class.getName()) - && !serializationLib.equals(LazySimpleSerDe.class.getName()) - && !serializationLib.equals(ColumnarSerDe.class.getName()) - && !serializationLib.equals(DynamicSerDe.class.getName()) - && !serializationLib.equals(ParquetHiveSerDe.class.getName()) - && !serializationLib.equals(OrcSerde.class.getName())) { - throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName()); - } - final boolean isOrcSchemaEvolution = - serializationLib.equals(OrcSerde.class.getName()) && - isSchemaEvolutionEnabled(tbl); - // adding columns and limited integer type promotion is supported for ORC schema evolution - if (isOrcSchemaEvolution) { - final List existingCols = sd.getCols(); - final List replaceCols = alterTbl.getNewCols(); - - if (replaceCols.size() < existingCols.size()) { - throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, alterTbl.getOldName()); - } - } - - boolean partitioned = tbl.isPartitioned(); - boolean droppingColumns = alterTbl.getNewCols().size() < sd.getCols().size(); - if (ParquetHiveSerDe.isParquetTable(tbl) && - isSchemaEvolutionEnabled(tbl) && - !alterTbl.getIsCascade() && - droppingColumns && partitioned) { - LOG.warn("Cannot drop columns from a partitioned parquet table without the CASCADE option"); - throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, - alterTbl.getOldName()); - } - sd.setCols(alterTbl.getNewCols()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) { return alterTableAddProps(alterTbl, tbl, part, environmentContext); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) { @@ -1700,8 +1438,6 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition } tbl.setNumBuckets(alterTbl.getNumberBuckets()); } - } else if (alterTbl.getOp() == AlterTableTypes.UPDATECOLUMNS) { - updateColumns(tbl, part); } else { throw new HiveException(ErrorMsg.UNSUPPORTED_ALTER_TBL_OP, alterTbl.getOp().toString()); } @@ -1851,84 +1587,6 @@ private void checkMmLb(Partition part) throws HiveException { return result; } - private int dropConstraint(Hive db, AlterTableDesc alterTbl) - throws SemanticException, HiveException { - try { - db.dropConstraint(Utilities.getDatabaseName(alterTbl.getOldName()), - Utilities.getTableName(alterTbl.getOldName()), - alterTbl.getConstraintName()); - } catch (NoSuchObjectException e) { - throw new HiveException(e); - } - return 0; - } - - private int addConstraints(Hive db, AlterTableDesc alterTbl) - throws SemanticException, HiveException { - try { - // This is either an alter table add foreign key or add primary key command. 
- if (alterTbl.getPrimaryKeyCols() != null && !alterTbl.getPrimaryKeyCols().isEmpty()) { - db.addPrimaryKey(alterTbl.getPrimaryKeyCols()); - } - if (alterTbl.getForeignKeyCols() != null && !alterTbl.getForeignKeyCols().isEmpty()) { - try { - db.addForeignKey(alterTbl.getForeignKeyCols()); - } catch (HiveException e) { - if (e.getCause() instanceof InvalidObjectException - && alterTbl.getReplicationSpec()!= null && alterTbl.getReplicationSpec().isInReplicationScope()) { - // During repl load, NoSuchObjectException in foreign key shall - // ignore as the foreign table may not be part of the replication - LOG.debug("InvalidObjectException: ", e); - } else { - throw e; - } - } - } - if (alterTbl.getUniqueConstraintCols() != null - && !alterTbl.getUniqueConstraintCols().isEmpty()) { - db.addUniqueConstraint(alterTbl.getUniqueConstraintCols()); - } - if (alterTbl.getNotNullConstraintCols() != null - && !alterTbl.getNotNullConstraintCols().isEmpty()) { - db.addNotNullConstraint(alterTbl.getNotNullConstraintCols()); - } - if (alterTbl.getDefaultConstraintCols() != null - && !alterTbl.getDefaultConstraintCols().isEmpty()) { - db.addDefaultConstraint(alterTbl.getDefaultConstraintCols()); - } - if (alterTbl.getCheckConstraintCols() != null - && !alterTbl.getCheckConstraintCols().isEmpty()) { - db.addCheckConstraint(alterTbl.getCheckConstraintCols()); - } - } catch (NoSuchObjectException e) { - throw new HiveException(e); - } - return 0; - } - - private int updateColumns(Table tbl, Partition part) - throws HiveException { - String serializationLib = tbl.getSd().getSerdeInfo().getSerializationLib(); - if (MetastoreConf.getStringCollection(conf, - MetastoreConf.ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA).contains(serializationLib)) { - throw new HiveException(tbl.getTableName() + " has serde " + serializationLib + " for which schema " + - "is already handled by HMS."); - } - Deserializer deserializer = tbl.getDeserializer(true); - try { - LOG.info("Updating metastore columns for table: {}", tbl.getTableName()); - final List fields = HiveMetaStoreUtils.getFieldsFromDeserializer( - tbl.getTableName(), deserializer); - StorageDescriptor sd = retrieveStorageDescriptor(tbl, part); - sd.setCols(fields); - } catch (org.apache.hadoop.hive.serde2.SerDeException | MetaException e) { - LOG.error("alter table update columns: {}", e); - throw new HiveException(e, ErrorMsg.GENERIC_ERROR); - } - - return 0; - } - /** * Update last_modified_by and last_modified_time parameters in parameter map. 
* diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java index 3afa201fbc..86dfef091c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java @@ -200,12 +200,12 @@ public boolean isTempURI() { */ public static WriteType determineAlterTableWriteType(AlterTableDesc.AlterTableTypes op) { switch (op) { - case RENAMECOLUMN: + case RENAME_COLUMN: case ADDCLUSTERSORTCOLUMN: case ADDFILEFORMAT: case ADDSERDE: case DROPPROPS: - case REPLACECOLS: + case REPLACE_COLUMNS: case ARCHIVE: case UNARCHIVE: case ALTERLOCATION: @@ -215,11 +215,11 @@ public static WriteType determineAlterTableWriteType(AlterTableDesc.AlterTableTy case ALTERSKEWEDLOCATION: case ALTERBUCKETNUM: case ALTERPARTITION: - case ADDCOLS: + case ADD_COLUMNS: case RENAME: case TRUNCATE: case MERGEFILES: - case DROPCONSTRAINT: return WriteType.DDL_EXCLUSIVE; + case DROP_CONSTRAINT: return WriteType.DDL_EXCLUSIVE; case ADDPARTITION: case ADDSERDEPROPS: diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 99d7f21228..50304e2374 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -91,6 +91,15 @@ import org.apache.hadoop.hive.ql.ddl.process.KillQueriesDesc; import org.apache.hadoop.hive.ql.ddl.process.ShowCompactionsDesc; import org.apache.hadoop.hive.ql.ddl.process.ShowTransactionsDesc; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableAddColumnsDesc; +import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableChangeColumnDesc; +import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableReplaceColumnsDesc; +import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableUpdateColumnsDesc; +import org.apache.hadoop.hive.ql.ddl.table.column.ShowColumnsDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableDropConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc; import org.apache.hadoop.hive.ql.ddl.table.creation.ShowCreateTableDesc; import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc; @@ -171,7 +180,6 @@ import org.apache.hadoop.hive.ql.plan.MoveWork; import org.apache.hadoop.hive.ql.plan.MsckDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; -import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc; import org.apache.hadoop.hive.ql.plan.ShowConfDesc; import org.apache.hadoop.hive.ql.plan.StatsWork; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -314,9 +322,9 @@ public void analyzeInternal(ASTNode input) throws SemanticException { } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { analyzeAlterTableArchive(qualified, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.ADDCOLS); + analyzeAlterTableAddCols(qualified, ast, partSpec); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { - analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.REPLACECOLS); + analyzeAlterTableReplaceCols(qualified, ast, partSpec); } else if (ast.getType() == 
HiveParser.TOK_ALTERTABLE_RENAMECOL) { analyzeAlterTableRenameCol(catName, qualified, ast, partSpec); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { @@ -1797,11 +1805,11 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ rootTasks.add(TaskFactory.get(ddlWork)); } - private void setAcidDdlDesc(DDLDescWithWriteId alterTblDesc) { + private void setAcidDdlDesc(DDLDescWithWriteId descWithWriteId) { if(this.ddlDescWithWriteId != null) { throw new IllegalStateException("ddlDescWithWriteId is already set: " + this.ddlDescWithWriteId); } - this.ddlDescWithWriteId = alterTblDesc; + this.ddlDescWithWriteId = descWithWriteId; } @Override @@ -1876,9 +1884,24 @@ private WriteType determineAlterTableWriteType(Table tab, AlterTableDesc desc, A } return WriteEntity.determineAlterTableWriteType(op); } + + // For the time while all the alter table operations are getting migrated there is a duplication of this method here + private WriteType determineAlterTableWriteType(Table tab, AbstractAlterTableDesc desc, AlterTableTypes op) { + boolean convertingToAcid = false; + if (desc != null && desc.getProps() != null && + Boolean.parseBoolean(desc.getProps().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL))) { + convertingToAcid = true; + } + if(!AcidUtils.isTransactionalTable(tab) && convertingToAcid) { + //non-acid to transactional conversion (property itself) must be mutexed to prevent concurrent writes. + // See HIVE-16688 for use cases. + return WriteType.DDL_EXCLUSIVE; + } + return WriteEntity.determineAlterTableWriteType(op); + } private void addInputsOutputsAlterTable(String tableName, Map partSpec, AlterTableTypes op) throws SemanticException { - addInputsOutputsAlterTable(tableName, partSpec, null, op, false); + addInputsOutputsAlterTable(tableName, partSpec, (AlterTableDesc)null, op, false); } private void addInputsOutputsAlterTable(String tableName, Map partSpec, @@ -1968,6 +1991,84 @@ private void addInputsOutputsAlterTable(String tableName, Map pa } } + // For the time while all the alter table operations are getting migrated there is a duplication of this method here + private void addInputsOutputsAlterTable(String tableName, Map partSpec, + AbstractAlterTableDesc desc, AlterTableTypes op, boolean doForceExclusive) throws SemanticException { + boolean isCascade = desc != null && desc.isCascade(); + boolean alterPartitions = partSpec != null && !partSpec.isEmpty(); + //cascade only occurs at table level then cascade to partition level + if (isCascade && alterPartitions) { + throw new SemanticException( + ErrorMsg.ALTER_TABLE_PARTITION_CASCADE_NOT_SUPPORTED, op.getName()); + } + + Table tab = getTable(tableName, true); + // cascade only occurs with partitioned table + if (isCascade && !tab.isPartitioned()) { + throw new SemanticException( + ErrorMsg.ALTER_TABLE_NON_PARTITIONED_TABLE_CASCADE_NOT_SUPPORTED); + } + + // Determine the lock type to acquire + WriteEntity.WriteType writeType = doForceExclusive + ? 
WriteType.DDL_EXCLUSIVE : determineAlterTableWriteType(tab, desc, op); + + if (!alterPartitions) { + inputs.add(new ReadEntity(tab)); + alterTableOutput = new WriteEntity(tab, writeType); + outputs.add(alterTableOutput); + //do not need the lock for partitions since they are covered by the table lock + if (isCascade) { + for (Partition part : getPartitions(tab, partSpec, false)) { + outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK)); + } + } + } else { + ReadEntity re = new ReadEntity(tab); + // In the case of altering a table for its partitions we don't need to lock the table + // itself, just the partitions. But the table will have a ReadEntity. So mark that + // ReadEntity as no lock. + re.noLockNeeded(); + inputs.add(re); + + if (isFullSpec(tab, partSpec)) { + // Fully specified partition spec + Partition part = getPartition(tab, partSpec, true); + outputs.add(new WriteEntity(part, writeType)); + } else { + // Partial partition spec supplied. Make sure this is allowed. + if (!AlterTableDesc.doesAlterTableTypeSupportPartialPartitionSpec(op)) { + throw new SemanticException( + ErrorMsg.ALTER_TABLE_TYPE_PARTIAL_PARTITION_SPEC_NO_SUPPORTED, op.getName()); + } else if (!conf.getBoolVar(HiveConf.ConfVars.DYNAMICPARTITIONING)) { + throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_DISABLED); + } + + for (Partition part : getPartitions(tab, partSpec, true)) { + outputs.add(new WriteEntity(part, writeType)); + } + } + } + + if (desc != null) { + validateAlterTableType(tab, op, desc.expectView()); + + // validate Unset Non Existed Table Properties +/* if (op == AlterTableDesc.AlterTableTypes.DROPPROPS && !desc.getIsDropIfExists()) { + Map tableParams = tab.getTTable().getParameters(); + for (String currKey : desc.getProps().keySet()) { + if (!tableParams.containsKey(currKey)) { + String errorMsg = + "The following property " + currKey + + " does not exist in " + tab.getTableName(); + throw new SemanticException( + ErrorMsg.ALTER_TBL_UNSET_NON_EXIST_PROPERTY.getMsg(errorMsg)); + } + } + }*/ + } + } + private void analyzeAlterTableOwner(ASTNode ast, String tableName) throws SemanticException { PrincipalDesc ownerPrincipal = AuthorizationParseUtils.getPrincipalDesc((ASTNode) ast.getChild(0)); @@ -2231,11 +2332,10 @@ private void analyzeAlterTableCompact(ASTNode ast, String tableName, private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName) throws SemanticException { - String dropConstraintName = unescapeIdentifier(ast.getChild(0).getText()); - AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, dropConstraintName, (ReplicationSpec)null); + String constraintName = unescapeIdentifier(ast.getChild(0).getText()); + AlterTableDropConstraintDesc alterTblDesc = new AlterTableDropConstraintDesc(tableName, null, constraintName); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) @@ -2273,11 +2373,12 @@ private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( child.getToken().getText())); } - AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys, - uniqueConstraints, null, null, checkConstraints, null); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + Constraints constraints = new 
Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, + checkConstraints); + AlterTableAddConstraintDesc alterTblDesc = new AlterTableAddConstraintDesc(tableName, null, constraints); + + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableUpdateColumns(ASTNode ast, String tableName, @@ -2288,17 +2389,13 @@ private void analyzeAlterTableUpdateColumns(ASTNode ast, String tableName, isCascade = true; } - AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.UPDATECOLUMNS); - alterTblDesc.setOldName(tableName); - alterTblDesc.setIsCascade(isCascade); - alterTblDesc.setPartSpec(partSpec); + AlterTableUpdateColumnsDesc alterTblDesc = new AlterTableUpdateColumnsDesc(tableName, partSpec, isCascade); Table tbl = getTable(tableName); if (AcidUtils.isTransactionalTable(tbl)) { setAcidDdlDesc(alterTblDesc); } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc), conf)); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc), conf)); } static HashMap getProps(ASTNode prop) { @@ -2729,9 +2826,8 @@ private void analyzeShowColumns(ASTNode ast) throws SemanticException { Table tab = getTable(tableName); inputs.add(new ReadEntity(tab)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - showColumnsDesc))); - setFetchTask(createFetchTask(showColumnsDesc.getSchema())); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showColumnsDesc))); + setFetchTask(createFetchTask(ShowColumnsDesc.SCHEMA)); } private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { @@ -3279,28 +3375,18 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN } String tblName = getDotName(qualified); - AlterTableDesc alterTblDesc; - if (primaryKeys == null && foreignKeys == null - && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null - && checkConstraints == null) { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade); - } else { - alterTblDesc = new AlterTableDesc(tblName, partSpec, - unescapeIdentifier(oldColName), unescapeIdentifier(newColName), - newType, newComment, first, flagCol, isCascade, - primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints); - } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); + Constraints constraints = new Constraints(primaryKeys, foreignKeys, notNullConstraints, uniqueConstraints, + defaultConstraints, checkConstraints); + AlterTableChangeColumnDesc alterTblDesc = new AlterTableChangeColumnDesc(tblName, partSpec, isCascade, constraints, + unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol); + addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc, alterTblDesc.getType(), false); if (AcidUtils.isTransactionalTable(tab)) { // Note: we might actually need it only when certain changes (e.g. name or type?) are made. 
setAcidDdlDesc(alterTblDesc); } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableRenamePart(ASTNode ast, String tblName, @@ -3344,8 +3430,8 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, alterBucketNum))); } - private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, - HashMap partSpec, AlterTableTypes alterType) throws SemanticException { + private void analyzeAlterTableAddCols(String[] qualified, ASTNode ast, Map partSpec) + throws SemanticException { String tblName = getDotName(qualified); List newCols = getColumns((ASTNode) ast.getChild(0)); @@ -3354,16 +3440,34 @@ private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast, isCascade = true; } - AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols, - alterType, isCascade); + AlterTableAddColumnsDesc desc = new AlterTableAddColumnsDesc(tblName, partSpec, isCascade, newCols); + Table table = getTable(tblName, true); + if (AcidUtils.isTransactionalTable(table)) { + setAcidDdlDesc(desc); + } + + addInputsOutputsAlterTable(tblName, partSpec, desc, desc.getType(), false); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc))); + } + + private void analyzeAlterTableReplaceCols(String[] qualified, ASTNode ast, Map partSpec) + throws SemanticException { + + String tblName = getDotName(qualified); + List newCols = getColumns((ASTNode) ast.getChild(0)); + boolean isCascade = false; + if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) { + isCascade = true; + } + + AlterTableReplaceColumnsDesc alterTblDesc = new AlterTableReplaceColumnsDesc(tblName, partSpec, isCascade, newCols); Table table = getTable(tblName, true); if (AcidUtils.isTransactionalTable(table)) { setAcidDdlDesc(alterTblDesc); } - addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - alterTblDesc))); + addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc, alterTblDesc.getType(), false); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java index bba769244b..b2e90fe752 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java @@ -23,14 +23,13 @@ import java.util.List; import org.apache.hadoop.hive.metastore.api.SQLForeignKey; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; public class 
AddForeignKeyHandler extends AbstractMessageHandler { @Override @@ -67,13 +66,14 @@ fk.setFktable_name(actualTblName); } - AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList(), fks, - new ArrayList(), context.eventOnlyReplicationSpec()); - Task addConstraintsTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Constraints constraints = new Constraints(null, fks, null, null, null, null); + AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." + actualTblName, + context.eventOnlyReplicationSpec(), constraints); + Task addConstraintsTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); - return Collections.singletonList(addConstraintsTask); + return Collections.singletonList(addConstraintsTask); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java index 90d9008a31..4273e445fb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java @@ -22,18 +22,14 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; -import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; public class AddNotNullConstraintHandler extends AbstractMessageHandler { @Override @@ -65,18 +61,14 @@ nn.setTable_name(actualTblName); } - AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, - new ArrayList(), - new ArrayList(), - new ArrayList(), - nns, new ArrayList(), - new ArrayList(), - context.eventOnlyReplicationSpec()); - Task addConstraintsTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Constraints constraints = new Constraints(null, null, nns, null, null, null); + AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." 
+ actualTblName, + context.eventOnlyReplicationSpec(), constraints); + Task addConstraintsTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); - return Collections.singletonList(addConstraintsTask); + return Collections.singletonList(addConstraintsTask); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java index e8966ad7c4..6cb4722dd4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java @@ -22,15 +22,14 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; public class AddPrimaryKeyHandler extends AbstractMessageHandler { @Override @@ -62,13 +61,14 @@ pk.setTable_name(actualTblName); } - AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, pks, new ArrayList(), - new ArrayList(), context.eventOnlyReplicationSpec()); - Task addConstraintsTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Constraints constraints = new Constraints(pks, null, null, null, null, null); + AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." 
+ actualTblName, + context.eventOnlyReplicationSpec(), constraints); + Task addConstraintsTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); - return Collections.singletonList(addConstraintsTask); + return Collections.singletonList(addConstraintsTask); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java index 81f1c5ab20..9b010d7c74 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java @@ -22,15 +22,14 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; public class AddUniqueConstraintHandler extends AbstractMessageHandler { @Override @@ -62,13 +61,14 @@ uk.setTable_name(actualTblName); } - AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList(), new ArrayList(), - uks, context.eventOnlyReplicationSpec()); - Task addConstraintsTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Constraints constraints = new Constraints(null, null, null, uks, null, null); + AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." 
+ actualTblName, + context.eventOnlyReplicationSpec(), constraints); + Task addConstraintsTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); - return Collections.singletonList(addConstraintsTask); + return Collections.singletonList(addConstraintsTask); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java index 5f9f879f6f..def207eec0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableDropConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import java.io.Serializable; import java.util.Collections; @@ -37,12 +37,12 @@ String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName; String constraintName = msg.getConstraint(); - AlterTableDesc dropConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, constraintName, - context.eventOnlyReplicationSpec()); - Task dropConstraintsTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc), context.hiveConf); + AlterTableDropConstraintDesc dropConstraintsDesc = new AlterTableDropConstraintDesc( + actualDbName + "." 
+ actualTblName, context.eventOnlyReplicationSpec(), constraintName); + Task dropConstraintsTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, dropConstraintsDesc), context.hiveConf); context.log.debug("Added drop constrain task : {}:{}", dropConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); - return Collections.singletonList(dropConstraintsTask); + return Collections.singletonList(dropConstraintsTask); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java index 8603521041..cb6958a327 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java @@ -21,14 +21,7 @@ import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; -import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; -import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; -import org.apache.hadoop.hive.metastore.api.SQLForeignKey; -import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; -import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; -import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Table; @@ -58,10 +51,10 @@ * */ public static enum AlterTableTypes { - RENAME("rename"), ADDCOLS("add columns"), REPLACECOLS("replace columns"), + RENAME("rename"), ADD_COLUMNS("add columns"), REPLACE_COLUMNS("replace columns"), ADDPROPS("add props"), DROPPROPS("drop props"), ADDSERDE("add serde"), ADDSERDEPROPS("add serde props"), ADDFILEFORMAT("add fileformat"), ADDCLUSTERSORTCOLUMN("add cluster sort column"), - RENAMECOLUMN("rename column"), ADDPARTITION("add partition"), TOUCH("touch"), ARCHIVE("archieve"), + RENAME_COLUMN("rename column"), ADDPARTITION("add partition"), TOUCH("touch"), ARCHIVE("archieve"), UNARCHIVE("unarchieve"), ALTERLOCATION("alter location"), DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), // Note: used in RenamePartitionDesc, not here. @@ -69,8 +62,9 @@ ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"), ALTERPARTITION("alter partition"), // Note: this is never used in AlterTableDesc. COMPACT("compact"), - TRUNCATE("truncate"), MERGEFILES("merge files"), DROPCONSTRAINT("drop constraint"), ADDCONSTRAINT("add constraint"), - UPDATECOLUMNS("update columns"), OWNER("set owner"), + TRUNCATE("truncate"), MERGEFILES("merge files"), DROP_CONSTRAINT("drop constraint"), + ADD_CONSTRAINT("add constraint"), + UPDATE_COLUMNS("update columns"), OWNER("set owner"), UPDATESTATS("update stats"); // Note: used in ColumnStatsUpdateWork, not here. 
; @@ -79,7 +73,7 @@ public String getName() { return name; } public static final List nonNativeTableAllowedTypes = - ImmutableList.of(ADDPROPS, DROPPROPS, ADDCOLS); + ImmutableList.of(ADDPROPS, DROPPROPS, ADD_COLUMNS); } public static enum ProtectModeType { @@ -90,9 +84,9 @@ new HashSet(); static { - alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.ADDCOLS); - alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.REPLACECOLS); - alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.RENAMECOLUMN); + alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.ADD_COLUMNS); + alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.REPLACE_COLUMNS); + alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.RENAME_COLUMN); alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.ADDPROPS); alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.DROPPROPS); alterTableTypesWithPartialSpec.add(AlterTableDesc.AlterTableTypes.ADDSERDE); @@ -103,7 +97,6 @@ AlterTableTypes op; String oldName; String newName; - ArrayList newCols; String serdeName; Map props; String inputFormat; @@ -117,8 +110,6 @@ String newColName; String newColType; String newColComment; - boolean first; - String afterCol; boolean expectView; HashMap partSpec; private String newLocation; @@ -134,13 +125,6 @@ boolean isTurnOffSorting = false; boolean isCascade = false; EnvironmentContext environmentContext; - String dropConstraintName; - List primaryKeyCols; - List foreignKeyCols; - List uniqueConstraintCols; - List notNullConstraintCols; - List defaultConstraintsCols; - List checkConstraintsCols; ReplicationSpec replicationSpec; private Long writeId = null; PrincipalDesc ownerPrincipal; @@ -149,58 +133,6 @@ public AlterTableDesc() { } - /** - * @param tblName - * table name - * @param oldColName - * old column name - * @param newColName - * new column name - * @param newComment - * @param newType - * @throws SemanticException - */ - public AlterTableDesc(String tblName, HashMap partSpec, - String oldColName, String newColName, String newType, String newComment, - boolean first, String afterCol, boolean isCascade) throws SemanticException { - super(); - setOldName(tblName); - this.partSpec = partSpec; - this.oldColName = oldColName; - this.newColName = newColName; - newColType = newType; - newColComment = newComment; - this.first = first; - this.afterCol = afterCol; - op = AlterTableTypes.RENAMECOLUMN; - this.isCascade = isCascade; - } - - public AlterTableDesc(String tblName, HashMap partSpec, - String oldColName, String newColName, String newType, String newComment, - boolean first, String afterCol, boolean isCascade, List primaryKeyCols, - List foreignKeyCols, List uniqueConstraintCols, - List notNullConstraintCols, List defaultConstraints, - List checkConstraints) throws SemanticException { - super(); - setOldName(tblName); - this.partSpec = partSpec; - this.oldColName = oldColName; - this.newColName = newColName; - newColType = newType; - newColComment = newComment; - this.first = first; - this.afterCol = afterCol; - op = AlterTableTypes.RENAMECOLUMN; - this.isCascade = isCascade; - this.primaryKeyCols = primaryKeyCols; - this.foreignKeyCols = foreignKeyCols; - this.uniqueConstraintCols = uniqueConstraintCols; - this.notNullConstraintCols = notNullConstraintCols; - this.defaultConstraintsCols = defaultConstraints; - this.checkConstraintsCols = checkConstraints; - } - /** * @param oldName * old name of the table @@ -220,22 +152,6 @@ public 
AlterTableDesc(String oldName, String newName, boolean expectView, Replic this.replicationSpec = replicationSpec; } - /** - * @param name - * name of the table - * @param newCols - * new columns to be added - * @throws SemanticException - */ - public AlterTableDesc(String name, HashMap partSpec, List newCols, - AlterTableTypes alterType, boolean isCascade) throws SemanticException { - op = alterType; - setOldName(name); - this.newCols = new ArrayList(newCols); - this.partSpec = partSpec; - this.isCascade = isCascade; - } - /** * @param alterType * type of alter op @@ -341,39 +257,6 @@ public AlterTableDesc(String tableName, HashMap partSpec, int nu this.numberBuckets = numBuckets; } - public AlterTableDesc(String tableName, String dropConstraintName, ReplicationSpec replicationSpec) throws SemanticException { - setOldName(tableName); - this.dropConstraintName = dropConstraintName; - this.replicationSpec = replicationSpec; - op = AlterTableTypes.DROPCONSTRAINT; - } - - public AlterTableDesc(String tableName, List primaryKeyCols, - List foreignKeyCols, List uniqueConstraintCols, - ReplicationSpec replicationSpec) throws SemanticException { - setOldName(tableName); - this.primaryKeyCols = primaryKeyCols; - this.foreignKeyCols = foreignKeyCols; - this.uniqueConstraintCols = uniqueConstraintCols; - this.replicationSpec = replicationSpec; - op = AlterTableTypes.ADDCONSTRAINT; - } - - public AlterTableDesc(String tableName, List primaryKeyCols, - List foreignKeyCols, List uniqueConstraintCols, - List notNullConstraintCols, List defaultConstraints, - List checkConstraints, ReplicationSpec replicationSpec) throws SemanticException { - setOldName(tableName); - this.primaryKeyCols = primaryKeyCols; - this.foreignKeyCols = foreignKeyCols; - this.uniqueConstraintCols = uniqueConstraintCols; - this.notNullConstraintCols = notNullConstraintCols; - this.defaultConstraintsCols = defaultConstraints; - this.checkConstraintsCols = checkConstraints; - this.replicationSpec = replicationSpec; - op = AlterTableTypes.ADDCONSTRAINT; - } - public AlterTableDesc(String tableName, PrincipalDesc ownerPrincipal) { op = AlterTableTypes.OWNER; this.oldName = tableName; @@ -392,11 +275,6 @@ public PrincipalDesc getOwnerPrincipal() { return this.ownerPrincipal; } - @Explain(displayName = "new columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public List getNewColsString() { - return Utilities.getFieldSchemaString(getNewCols()); - } - @Explain(displayName = "type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getAlterTableTypeString() { return op.getName(); @@ -450,21 +328,6 @@ public void setOp(AlterTableTypes op) { this.op = op; } - /** - * @return the newCols - */ - public ArrayList getNewCols() { - return newCols; - } - - /** - * @param newCols - * the newCols to set - */ - public void setNewCols(ArrayList newCols) { - this.newCols = newCols; - } - /** * @return the serdeName */ @@ -537,78 +400,6 @@ public String getStorageHandler() { return storageHandler; } - /** - * @param primaryKeyCols - * the primary key cols to set - */ - public void setPrimaryKeyCols(List primaryKeyCols) { - this.primaryKeyCols = primaryKeyCols; - } - - /** - * @return the primary key cols - */ - public List getPrimaryKeyCols() { - return primaryKeyCols; - } - - /** - * @param foreignKeyCols - * the foreign key cols to set - */ - public void setForeignKeyCols(List foreignKeyCols) { - this.foreignKeyCols = foreignKeyCols; - } - - /** - * @return the foreign key cols - */ - public List 
getForeignKeyCols() { - return foreignKeyCols; - } - - /** - * @return the unique constraint cols - */ - public List getUniqueConstraintCols() { - return uniqueConstraintCols; - } - - /** - * @return the not null constraint cols - */ - public List getNotNullConstraintCols() { - return notNullConstraintCols; - } - - /** - * @return the default constraint cols - */ - public List getDefaultConstraintCols() { - return defaultConstraintsCols; - } - - /** - * @return the check constraint cols - */ - public List getCheckConstraintCols() { return checkConstraintsCols; } - - /** - * @return the drop constraint name of the table - */ - @Explain(displayName = "drop constraint name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getConstraintName() { - return dropConstraintName; - } - - /** - * @param constraintName - * the dropConstraintName to set - */ - public void setDropConstraintName(String constraintName) { - this.dropConstraintName = constraintName; - } - /** * @param storageHandler * the storage handler to set @@ -722,36 +513,6 @@ public void setNewColComment(String newComment) { newColComment = newComment; } - /** - * @return if the column should be changed to position 0 - */ - public boolean getFirst() { - return first; - } - - /** - * @param first - * set the column to position 0 - */ - public void setFirst(boolean first) { - this.first = first; - } - - /** - * @return the column's after position - */ - public String getAfterCol() { - return afterCol; - } - - /** - * @param afterCol - * set the column's after position - */ - public void setAfterCol(String afterCol) { - this.afterCol = afterCol; - } - /** * @return whether to expect a view being altered */ @@ -974,12 +735,12 @@ public boolean mayNeedWriteId() { case DROPPROPS: return isExplicitStatsUpdate; // The check for the following ones is performed before setting AlterTableDesc into the acid field. // These need write ID and stuff because they invalidate column stats. - case RENAMECOLUMN: + case RENAME_COLUMN: case RENAME: - case REPLACECOLS: - case ADDCOLS: + case REPLACE_COLUMNS: + case ADD_COLUMNS: case ALTERLOCATION: - case UPDATECOLUMNS: return true; + case UPDATE_COLUMNS: return true; // RENAMEPARTITION is handled in RenamePartitionDesc default: return false; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index 6cd84bb8ab..07feae32e7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -35,7 +35,6 @@ // TODO: this can probably be replaced with much less code via dynamic dispatch and/or templates. 
private InsertCommitHookDesc insertCommitHookDesc; private AlterTableDesc alterTblDesc; - private ShowColumnsDesc showColumnsDesc; private AlterTableSimpleDesc alterTblSimpleDesc; private MsckDesc msckDesc; @@ -80,16 +79,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.alterTblDesc = alterTblDesc; } - /** - * @param showColumnsDesc - */ - public DDLWork(HashSet inputs, HashSet outputs, - ShowColumnsDesc showColumnsDesc) { - this(inputs, outputs); - - this.showColumnsDesc = showColumnsDesc; - } - /** * @param inputs * @param outputs @@ -142,14 +131,6 @@ public AlterTableDesc getAlterTblDesc() { return alterTblDesc; } - /** - * @return the showColumnsDesc - */ - @Explain(displayName = "Show Columns Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public ShowColumnsDesc getShowColumnsDesc() { - return showColumnsDesc; - } - /** * @return information about the table/partitions we want to alter. */ diff --git ql/src/test/queries/clientpositive/allow_change_col_type_par.q ql/src/test/queries/clientpositive/allow_change_col_type_par.q index aad63705f7..ae2b8d5c93 100644 --- ql/src/test/queries/clientpositive/allow_change_col_type_par.q +++ ql/src/test/queries/clientpositive/allow_change_col_type_par.q @@ -13,4 +13,5 @@ set hive.metastore.disallow.incompatible.col.type.changes; set metaconf:hive.metastore.disallow.incompatible.col.type.changes; -- Change int to small int now allowed. +explain alter table t1_n14 change column c1 c1 smallint; alter table t1_n14 change column c1 c1 smallint; diff --git ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q index 279d05d2e3..5b0bad5d75 100644 --- ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q +++ ql/src/test/queries/clientpositive/avro_alter_table_update_columns.q @@ -34,6 +34,7 @@ ALTER TABLE avro_extschema_literal_n1 SET "fields": [ { "name":"newCol", "type":"int" } ] }'); +EXPLAIN ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE; ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE; DESCRIBE avro_extschema_literal_n1; @@ -85,6 +86,7 @@ DESCRIBE avro_extschema_url_parted; --case: partial partition spec ALTER TABLE avro_extschema_url_parted SET TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad2.avsc'); +EXPLAIN ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS; ALTER TABLE avro_extschema_url_parted UNSET TBLPROPERTIES ('avro.schema.url'); @@ -113,4 +115,4 @@ ALTER TABLE avro_extschema_url_parted UNSET TBLPROPERTIES ('avro.schema.url'); DESCRIBE avro_extschema_url_parted; DESCRIBE avro_extschema_url_parted PARTITION (p1=2017, p2=11); DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=2); -DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=3); \ No newline at end of file +DESCRIBE avro_extschema_url_parted PARTITION (p1=2018, p2=3); diff --git ql/src/test/queries/clientpositive/check_constraint.q ql/src/test/queries/clientpositive/check_constraint.q index 202110259d..08a153e867 100644 --- ql/src/test/queries/clientpositive/check_constraint.q +++ ql/src/test/queries/clientpositive/check_constraint.q @@ -32,6 +32,7 @@ Drop table tudf; -- multiple constraints create table tmulti(url string NOT NULL ENABLE, userName string, numClicks int CHECK (numClicks > 0), d date); +explain alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE; alter table tmulti add 
constraint un1 UNIQUE (userName, numClicks) DISABLE; DESC formatted tmulti; EXPLAIN INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018'); @@ -125,6 +126,7 @@ select * from acid_uami_n0 order by de desc limit 15; explain update acid_uami_n0 set de = 893.14 where de = 103.00 or de = 119.00; update acid_uami_n0 set de = 893.14 where de = 103.00 or de = 119.00; select * from acid_uami_n0 order by de desc limit 15; +explain ALTER table acid_uami_n0 drop constraint ch2; ALTER table acid_uami_n0 drop constraint ch2; explain update acid_uami_n0 set vc = 'apache_hive' where de = 893.14 ; update acid_uami_n0 set vc = 'apache_hive' where de = 893.14 ; diff --git ql/src/test/queries/clientpositive/rename_column.q ql/src/test/queries/clientpositive/rename_column.q index 96daf9d658..82036f68b0 100644 --- ql/src/test/queries/clientpositive/rename_column.q +++ ql/src/test/queries/clientpositive/rename_column.q @@ -7,15 +7,18 @@ set hive.metastore.disallow.incompatible.col.type.changes=false; ALTER TABLE kv_rename_test CHANGE a a1 INT; DESCRIBE kv_rename_test; +EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST; ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST; DESCRIBE kv_rename_test; +EXPLAIN ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b; ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b; DESCRIBE kv_rename_test; ALTER TABLE kv_rename_test CHANGE a a1 INT COMMENT 'test comment1'; DESCRIBE kv_rename_test; +EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST; ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST; DESCRIBE kv_rename_test; diff --git ql/src/test/queries/clientpositive/show_columns.q ql/src/test/queries/clientpositive/show_columns.q index aa45bae9d5..4d95ebeaa1 100644 --- ql/src/test/queries/clientpositive/show_columns.q +++ ql/src/test/queries/clientpositive/show_columns.q @@ -12,12 +12,15 @@ CREATE TABLE foo_n7(col1 INT, col2 INT, col3 INT, cola INT, colb INT, colc INT, -- SHOW COLUMNS basic syntax tests USE test_db; +EXPLAIN SHOW COLUMNS from foo_n7; SHOW COLUMNS from foo_n7; +EXPLAIN SHOW COLUMNS in foo_n7; SHOW COLUMNS in foo_n7; SHOW COLUMNS in foo_n7 'col*'; SHOW COLUMNS in foo_n7 "col*"; SHOW COLUMNS from foo_n7 'col*'; SHOW COLUMNS from foo_n7 "col*"; +EXPLAIN SHOW COLUMNS from foo_n7 "col1|cola"; SHOW COLUMNS from foo_n7 "col1|cola"; -- SHOW COLUMNS from a database with a name that requires escaping @@ -33,7 +36,9 @@ SHOW COLUMNS in foo_n7 "col+"; SHOW COLUMNS in foo_n7 "nomatch"; use default; +EXPLAIN SHOW COLUMNS from test_db.foo_n7; SHOW COLUMNS from test_db.foo_n7; SHOW COLUMNS from foo_n7 from test_db; SHOW COLUMNS from foo_n7 from test_db "col*"; +EXPLAIN SHOW COLUMNS from foo_n7 from test_db like 'col*'; SHOW COLUMNS from foo_n7 from test_db like 'col*'; diff --git ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out index 3f91e85a3a..a5a75de2d2 100644 --- ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out +++ ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out @@ -14,5 +14,5 @@ PREHOOK: query: alter table t1 change column c1 c1 smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@t1 PREHOOK: Output: default@t1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : c1 diff --git ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out index 542e85cb89..643b293915 100644 --- ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out +++ ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out @@ -17,4 +17,4 @@ PREHOOK: query: alter table alter_partition_change_col_dup_col change c2 c1 deci PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@alter_partition_change_col_dup_col PREHOOK: Output: default@alter_partition_change_col_dup_col -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Duplicate column name: c1 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Duplicate column name: c1 diff --git ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out index bc5a6f980f..77519da0ea 100644 --- ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out +++ ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out @@ -17,4 +17,4 @@ PREHOOK: query: alter table alter_partition_change_col_nonexist change c3 c4 dec PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@alter_partition_change_col_nonexist PREHOOK: Output: default@alter_partition_change_col_nonexist -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference c3 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid column reference c3 diff --git ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out index acf65f2ff6..f8e16d3f6c 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table1 PREHOOK: query: alter table table1 add constraint pk4 primary key (b) disable novalidate rely PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message: Primary key already exists for: hive.default.table1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
MetaException(message: Primary key already exists for: hive.default.table1) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out index 1617609ce2..4a3ddd3466 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (c) references table1(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Child column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Child column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out index 47166ac6c2..3453c1f13f 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (b) references table1(c) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Parent column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out index 49bc928ac1..9ff66c3204 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table3 add constraint fk1 foreign key (c) references table1(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Child table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Child table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out index f5ac4ac54a..a81568b966 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (b) references table3(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
InvalidObjectException(message:Parent table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out index 71689f70ee..d64b023c78 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table1 PREHOOK: query: alter table table1 add constraint pk1 primary key (c) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Parent column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out index 792134cbe6..59ed5d6ef0 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table3 add constraint pk3 primary key (a) disable novalidate rely PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Parent table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out index 9e98454f5b..1687c5add7 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (a) references table1(b) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Foreign key references b:string; but no corresponding primary key or unique key exists. Possible keys: [a:string;]) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Foreign key references b:string; but no corresponding primary key or unique key exists. Possible keys: [a:string;]) diff --git ql/src/test/results/clientnegative/altern1.q.out ql/src/test/results/clientnegative/altern1.q.out index ff3c670864..beb0df51af 100644 --- ql/src/test/results/clientnegative/altern1.q.out +++ ql/src/test/results/clientnegative/altern1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table altern1 replace columns(a int, b int, ds string) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@altern1 PREHOOK: Output: default@altern1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
Partition column name ds conflicts with table columns. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition column name ds conflicts with table columns. diff --git ql/src/test/results/clientnegative/avro_add_column_extschema.q.out ql/src/test/results/clientnegative/avro_add_column_extschema.q.out index a1b1b9ca6e..ac9e994fc4 100644 --- ql/src/test/results/clientnegative/avro_add_column_extschema.q.out +++ ql/src/test/results/clientnegative/avro_add_column_extschema.q.out @@ -40,4 +40,4 @@ CHANGE COLUMN number number bigint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@avro_extschema PREHOOK: Output: default@avro_extschema -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Not allowed to alter schema of Avro stored table having external schema. Consider removing avro.schema.literal or avro.schema.url from table properties. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Not allowed to alter schema of Avro stored table having external schema. Consider removing avro.schema.literal or avro.schema.url from table properties. diff --git ql/src/test/results/clientnegative/column_rename1.q.out ql/src/test/results/clientnegative/column_rename1.q.out index 01549c2665..5509275c74 100644 --- ql/src/test/results/clientnegative/column_rename1.q.out +++ ql/src/test/results/clientnegative/column_rename1.q.out @@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change src_not_exist key_value string PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@tstsrc PREHOOK: Output: default@tstsrc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference src_not_exist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid column reference src_not_exist diff --git ql/src/test/results/clientnegative/column_rename2.q.out ql/src/test/results/clientnegative/column_rename2.q.out index 41c2219d10..38bbd53fb1 100644 --- ql/src/test/results/clientnegative/column_rename2.q.out +++ ql/src/test/results/clientnegative/column_rename2.q.out @@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change key value string PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@tstsrc PREHOOK: Output: default@tstsrc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Duplicate column name: value +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Duplicate column name: value diff --git ql/src/test/results/clientnegative/column_rename4.q.out ql/src/test/results/clientnegative/column_rename4.q.out index d5729f127a..c2fcaadeff 100644 --- ql/src/test/results/clientnegative/column_rename4.q.out +++ ql/src/test/results/clientnegative/column_rename4.q.out @@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change key key2 string after key_value PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@tstsrc PREHOOK: Output: default@tstsrc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference key_value +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
Invalid column reference key_value diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out index 8fe4c05872..68a7b97ccc 100644 --- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out +++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out @@ -104,5 +104,5 @@ PREHOOK: query: ALTER TABLE test_table123 REPLACE COLUMNS (a INT, b STRING) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@test_table123 PREHOOK: Output: default@test_table123 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : a,b diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out index 6291feb931..a178040ea5 100644 --- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out +++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out @@ -40,5 +40,5 @@ PREHOOK: query: ALTER TABLE test_table123 CHANGE COLUMN b b MAP PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@test_table123 PREHOOK: Output: default@test_table123 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : b diff --git ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out index 2cb3996015..7292f1f600 100644 --- ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out +++ ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out @@ -12,4 +12,4 @@ POSTHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1 POSTHOOK: type: ALTERTABLE_DROPCONSTRAINT PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1) diff --git ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out index 04352b40d7..9d20d628a7 100644 --- ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out +++ ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1) diff --git ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out index 03e4bd6097..4754a4d14e 100644 --- ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out +++ ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: ALTER TABLE table2 DROP CONSTRAINT pk2 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table2) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table2) diff --git ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out index 473dec7a4c..0d2a80ba04 100644 --- ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out +++ ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk2 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table1) diff --git ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out index 202acd7e31..d0da178633 100644 --- ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out +++ ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out @@ -15,4 +15,4 @@ POSTHOOK: Input: default@hmsserdetable name string PREHOOK: query: ALTER TABLE hmsserdetable UPDATE COLUMNS PREHOOK: type: ALTERTABLE_UPDATECOLUMNS -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. hmsserdetable has serde org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe for which schema is already handled by HMS. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. hmsserdetable has serde org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe for which schema is already handled by HMS. 
diff --git ql/src/test/results/clientnegative/orc_reorder_columns1.q.out ql/src/test/results/clientnegative/orc_reorder_columns1.q.out index c581f4e312..b9f013094f 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out index 8f7255c973..d72eb950c8 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2.q.out ql/src/test/results/clientnegative/orc_reorder_columns2.q.out index 54dcdecf1c..a9aebe5bf7 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out index 6cae15b81a..98cf32357a 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. 
SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1.q.out ql/src/test/results/clientnegative/orc_replace_columns1.q.out index 13f3f1448b..a00e485be8 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out index 46caec214f..6269bb47b6 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns2.q.out ql/src/test/results/clientnegative/orc_replace_columns2.q.out index 2316bbbf35..67d23e827d 100644 --- ql/src/test/results/clientnegative/orc_replace_columns2.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns2.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc replace columns (k smallint, val int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out index e01b7b9edb..65848e2929 100644 --- ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc replace columns (k smallint, val int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_replace_columns3.q.out ql/src/test/results/clientnegative/orc_replace_columns3.q.out index a7b3b72ced..03dd2e7d3a 100644 --- ql/src/test/results/clientnegative/orc_replace_columns3.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns3.q.out @@ -18,5 +18,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : z diff --git ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out index b82ad57f6e..5f81e92b5a 100644 --- ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out @@ -18,5 +18,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : z diff --git ql/src/test/results/clientnegative/orc_type_promotion1.q.out ql/src/test/results/clientnegative/orc_type_promotion1.q.out index f45283664a..a833c07eee 100644 --- ql/src/test/results/clientnegative/orc_type_promotion1.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion1.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out index 49800a96df..ae6f7e4fca 100644 --- ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion2.q.out ql/src/test/results/clientnegative/orc_type_promotion2.q.out index 740ee1e850..785b949ef2 100644 --- ql/src/test/results/clientnegative/orc_type_promotion2.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion2.q.out @@ -66,5 +66,5 @@ PREHOOK: query: alter table src_orc change val val int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out index 28c789a20a..164cdfe9ab 100644 --- ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out @@ -66,5 +66,5 @@ PREHOOK: query: alter table src_orc change val val int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_type_promotion3.q.out ql/src/test/results/clientnegative/orc_type_promotion3.q.out index 4f97e3118e..df721cee61 100644 --- ql/src/test/results/clientnegative/orc_type_promotion3.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion3.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out index a214985f94..ee766fca99 100644 --- ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out index d22d9c8763..4532a5d65e 100644 --- ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out +++ ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out @@ -50,4 +50,4 @@ favnumber int PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@myparquettable_parted PREHOOK: Output: default@myparquettable_parted -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns cannot drop columns for table default.myparquettable_parted. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.myparquettable_parted. 
SerDe may be incompatible diff --git ql/src/test/results/clientpositive/allow_change_col_type_par.q.out ql/src/test/results/clientpositive/allow_change_col_type_par.q.out index e9570370ea..d4ac7f1468 100644 --- ql/src/test/results/clientpositive/allow_change_col_type_par.q.out +++ ql/src/test/results/clientpositive/allow_change_col_type_par.q.out @@ -10,6 +10,25 @@ hive.metastore.disallow.incompatible.col.type.changes=true metaconf:hive.metastore.disallow.incompatible.col.type.changes=true hive.metastore.disallow.incompatible.col.type.changes=true metaconf:hive.metastore.disallow.incompatible.col.type.changes=false +PREHOOK: query: explain alter table t1_n14 change column c1 c1 smallint +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@t1_n14 +PREHOOK: Output: default@t1_n14 +POSTHOOK: query: explain alter table t1_n14 change column c1 c1 smallint +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@t1_n14 +POSTHOOK: Output: default@t1_n14 +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Change Column + new column name: c1 + new column type: smallint + old column name: c1 + table name: default.t1_n14 + PREHOOK: query: alter table t1_n14 change column c1 c1 smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@t1_n14 diff --git ql/src/test/results/clientpositive/avro_alter_table_update_columns.q.out ql/src/test/results/clientpositive/avro_alter_table_update_columns.q.out index 7f74f6c141..3c6e3ea8ab 100644 --- ql/src/test/results/clientpositive/avro_alter_table_update_columns.q.out +++ ql/src/test/results/clientpositive/avro_alter_table_update_columns.q.out @@ -103,6 +103,19 @@ POSTHOOK: query: ALTER TABLE avro_extschema_literal_n1 SET POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: default@avro_extschema_literal_n1 POSTHOOK: Output: default@avro_extschema_literal_n1 +PREHOOK: query: EXPLAIN ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE +PREHOOK: type: ALTERTABLE_UPDATECOLUMNS +POSTHOOK: query: EXPLAIN ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE +POSTHOOK: type: ALTERTABLE_UPDATECOLUMNS +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Update Columns + table name: default.avro_extschema_literal_n1 + cascade: true + PREHOOK: query: ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE PREHOOK: type: ALTERTABLE_UPDATECOLUMNS POSTHOOK: query: ALTER TABLE avro_extschema_literal_n1 UPDATE COLUMNS CASCADE @@ -306,6 +319,20 @@ POSTHOOK: query: ALTER TABLE avro_extschema_url_parted SET POSTHOOK: type: ALTERTABLE_PROPERTIES POSTHOOK: Input: default@avro_extschema_url_parted POSTHOOK: Output: default@avro_extschema_url_parted +PREHOOK: query: EXPLAIN ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS +PREHOOK: type: ALTERTABLE_UPDATECOLUMNS +POSTHOOK: query: EXPLAIN ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS +POSTHOOK: type: ALTERTABLE_UPDATECOLUMNS +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Update Columns + partition: + p1 2018 + table name: default.avro_extschema_url_parted + PREHOOK: query: ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS PREHOOK: type: ALTERTABLE_UPDATECOLUMNS POSTHOOK: query: ALTER TABLE avro_extschema_url_parted PARTITION (p1=2018) UPDATE COLUMNS diff --git ql/src/test/results/clientpositive/input3.q.out ql/src/test/results/clientpositive/input3.q.out index 0ac95780cb..c521ed6c78 100644 --- 
ql/src/test/results/clientpositive/input3.q.out
+++ ql/src/test/results/clientpositive/input3.q.out
@@ -54,11 +54,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Table Operator:
-        Alter Table
-          type: add columns
-          new columns: x double
-          old name: default.TEST3b
+      Add Columns
+        new columns: x double
+        table name: default.TEST3b
 
 PREHOOK: query: ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE)
 PREHOOK: type: ALTERTABLE_ADDCOLS
@@ -141,11 +139,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Alter Table Operator:
-        Alter Table
-          type: replace columns
-          new columns: r1 int, r2 double
-          old name: default.TEST3c
+      Replace Columns
+        new columns: r1 int, r2 double
+        table name: default.TEST3c
 
 PREHOOK: query: ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
diff --git ql/src/test/results/clientpositive/llap/check_constraint.q.out ql/src/test/results/clientpositive/llap/check_constraint.q.out
index 297d8928f8..20b99eb332 100644
--- ql/src/test/results/clientpositive/llap/check_constraint.q.out
+++ ql/src/test/results/clientpositive/llap/check_constraint.q.out
@@ -414,6 +414,18 @@ POSTHOOK: query: create table tmulti(url string NOT NULL ENABLE, userName string
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tmulti
+PREHOOK: query: explain alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE
+PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
+POSTHOOK: query: explain alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE
+POSTHOOK: type: ALTERTABLE_ADDCONSTRAINT
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Add Constraint
+        table name: default.tmulti
+
 PREHOOK: query: alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE
 PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
 POSTHOOK: query: alter table tmulti add constraint un1 UNIQUE (userName, numClicks) DISABLE
@@ -2063,6 +2075,19 @@ POSTHOOK: query: select * from acid_uami_n0 order by de desc limit 15
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_uami_n0
 #### A masked pattern was here ####
+PREHOOK: query: explain ALTER table acid_uami_n0 drop constraint ch2
+PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
+POSTHOOK: query: explain ALTER table acid_uami_n0 drop constraint ch2
+POSTHOOK: type: ALTERTABLE_DROPCONSTRAINT
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Drop Constraint
+        constraint name: ch2
+        table name: default.acid_uami_n0
+
 PREHOOK: query: ALTER table acid_uami_n0 drop constraint ch2
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
 POSTHOOK: query: ALTER table acid_uami_n0 drop constraint ch2
diff --git ql/src/test/results/clientpositive/rename_column.q.out ql/src/test/results/clientpositive/rename_column.q.out
index 43abc7f72a..0783f70a5a 100644
--- ql/src/test/results/clientpositive/rename_column.q.out
+++ ql/src/test/results/clientpositive/rename_column.q.out
@@ -49,6 +49,26 @@ POSTHOOK: Input: default@kv_rename_test
 a1 int
 b int
 c int
+PREHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@kv_rename_test
+PREHOOK: Output: default@kv_rename_test
+POSTHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@kv_rename_test
+POSTHOOK: Output: default@kv_rename_test
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Change Column
+        new column name: a2
+        new column type: int
+        old column name: a1
+        table name: default.kv_rename_test
+        first: true
+
 PREHOOK: query: ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@kv_rename_test
@@ -66,6 +86,26 @@ POSTHOOK: Input: default@kv_rename_test
 a2 int
 b int
 c int
+PREHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@kv_rename_test
+PREHOOK: Output: default@kv_rename_test
+POSTHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@kv_rename_test
+POSTHOOK: Output: default@kv_rename_test
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Change Column
+        after column: b
+        new column name: a
+        new column type: int
+        old column name: a2
+        table name: default.kv_rename_test
+
 PREHOOK: query: ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@kv_rename_test
@@ -100,6 +140,27 @@ POSTHOOK: Input: default@kv_rename_test
 b int
 a1 int test comment1
 c int
+PREHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@kv_rename_test
+PREHOOK: Output: default@kv_rename_test
+POSTHOOK: query: EXPLAIN ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@kv_rename_test
+POSTHOOK: Output: default@kv_rename_test
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Change Column
+        new column comment: test comment2
+        new column name: a2
+        new column type: int
+        old column name: a1
+        table name: default.kv_rename_test
+        first: true
+
 PREHOOK: query: ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@kv_rename_test
diff --git ql/src/test/results/clientpositive/show_columns.q.out ql/src/test/results/clientpositive/show_columns.q.out
index 80d69a7276..c00124f1ea 100644
--- ql/src/test/results/clientpositive/show_columns.q.out
+++ ql/src/test/results/clientpositive/show_columns.q.out
@@ -20,8 +20,8 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-      Show Columns Operator:
-        table name: shcol_test
+      Show Columns
+        table name: shcol_test
 
   Stage: Stage-1
     Fetch Operator
@@ -64,6 +64,27 @@ PREHOOK: Input: database:test_db
 POSTHOOK: query: USE test_db
 POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:test_db
+PREHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7
+PREHOOK: type: SHOWCOLUMNS
+PREHOOK: Input: test_db@foo_n7
+POSTHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7
+POSTHOOK: type: SHOWCOLUMNS
+POSTHOOK: Input: test_db@foo_n7
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+      Show Columns
+        table name: foo_n7
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW COLUMNS from foo_n7
 PREHOOK: type: SHOWCOLUMNS
 PREHOOK: Input: test_db@foo_n7
@@ -79,6 +100,27 @@ col3
 cola
 colb
 colc
+PREHOOK: query: EXPLAIN SHOW COLUMNS in foo_n7
+PREHOOK: type: SHOWCOLUMNS
+PREHOOK: Input: test_db@foo_n7
+POSTHOOK: query: EXPLAIN SHOW COLUMNS in foo_n7
+POSTHOOK: type: SHOWCOLUMNS
+POSTHOOK: Input: test_db@foo_n7
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+      Show Columns
+        table name: foo_n7
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW COLUMNS in foo_n7
 PREHOOK: type: SHOWCOLUMNS
 PREHOOK: Input: test_db@foo_n7
@@ -142,6 +184,28 @@ col3
 cola
 colb
 colc
+PREHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7 "col1|cola"
+PREHOOK: type: SHOWCOLUMNS
+PREHOOK: Input: test_db@foo_n7
+POSTHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7 "col1|cola"
+POSTHOOK: type: SHOWCOLUMNS
+POSTHOOK: Input: test_db@foo_n7
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+      Show Columns
+        pattern: col1|cola
+        table name: foo_n7
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW COLUMNS from foo_n7 "col1|cola"
 PREHOOK: type: SHOWCOLUMNS
 PREHOOK: Input: test_db@foo_n7
@@ -221,6 +285,27 @@ PREHOOK: Input: database:default
 POSTHOOK: query: use default
 POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:default
+PREHOOK: query: EXPLAIN SHOW COLUMNS from test_db.foo_n7
+PREHOOK: type: SHOWCOLUMNS
+PREHOOK: Input: test_db@foo_n7
+POSTHOOK: query: EXPLAIN SHOW COLUMNS from test_db.foo_n7
+POSTHOOK: type: SHOWCOLUMNS
+POSTHOOK: Input: test_db@foo_n7
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+      Show Columns
+        table name: test_db.foo_n7
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW COLUMNS from test_db.foo_n7
 PREHOOK: type: SHOWCOLUMNS
 PREHOOK: Input: test_db@foo_n7
@@ -263,6 +348,28 @@ col3
 cola
 colb
 colc
+PREHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7 from test_db like 'col*'
+PREHOOK: type: SHOWCOLUMNS
+PREHOOK: Input: test_db@foo_n7
+POSTHOOK: query: EXPLAIN SHOW COLUMNS from foo_n7 from test_db like 'col*'
+POSTHOOK: type: SHOWCOLUMNS
+POSTHOOK: Input: test_db@foo_n7
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+      Show Columns
+        pattern: col*
+        table name: test_db.foo_n7
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW COLUMNS from foo_n7 from test_db like 'col*'
 PREHOOK: type: SHOWCOLUMNS
 PREHOOK: Input: test_db@foo_n7