diff --git data/files/dec.txt data/files/dec.txt
new file mode 100644
index 0000000..58ff04b
--- /dev/null
+++ data/files/dec.txt
@@ -0,0 +1,10 @@
+Tom234.79
+Beck77.341
+Snow55.71
+Mary4.329
+Cluck5.96
+Tom-12.25
+Mary33.33
+Tom19.00
+Beck0.0
+Beck79.9
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 1ba052c..a142d22 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -3507,18 +3507,20 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
     }
 
     Table oldTbl = tbl.copy();
+    List oldCols = (part == null ? tbl.getCols() : part.getCols());
+    StorageDescriptor sd = (part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd());
 
     if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) {
       tbl.setTableName(alterTbl.getNewName());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) {
       List newCols = alterTbl.getNewCols();
-      List oldCols = tbl.getCols();
-      if (tbl.getSerializationLib().equals(
+      String serializationLib = sd.getSerdeInfo().getSerializationLib();
+      if (serializationLib.equals(
           "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
         console
             .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
-        tbl.setSerializationLib(LazySimpleSerDe.class.getName());
-        tbl.getTTable().getSd().setCols(newCols);
+        sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+        sd.setCols(newCols);
       } else {
         // make sure the columns does not already exist
         Iterator iterNewCols = newCols.iterator();
@@ -3534,10 +3536,9 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         }
         oldCols.add(newCol);
       }
-        tbl.getTTable().getSd().setCols(oldCols);
+        sd.setCols(oldCols);
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAMECOLUMN) {
-      List oldCols = tbl.getCols();
       List newCols = new ArrayList();
       Iterator iterOldCols = oldCols.iterator();
       String oldName = alterTbl.getOldColName();
@@ -3598,23 +3599,23 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         newCols.add(position, column);
       }
 
-      tbl.getTTable().getSd().setCols(newCols);
-
+      sd.setCols(newCols);
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.REPLACECOLS) {
       // change SerDe to LazySimpleSerDe if it is columnsetSerDe
-      if (tbl.getSerializationLib().equals(
+      String serializationLib = sd.getSerdeInfo().getSerializationLib();
+      if (serializationLib.equals(
           "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
         console
             .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
-        tbl.setSerializationLib(LazySimpleSerDe.class.getName());
-      } else if (!tbl.getSerializationLib().equals(
+        sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+      } else if (!serializationLib.equals(
           MetadataTypedColumnsetSerDe.class.getName())
-          && !tbl.getSerializationLib().equals(LazySimpleSerDe.class.getName())
-          && !tbl.getSerializationLib().equals(ColumnarSerDe.class.getName())
-          && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())) {
+          && !serializationLib.equals(LazySimpleSerDe.class.getName())
+          && !serializationLib.equals(ColumnarSerDe.class.getName())
+          && !serializationLib.equals(DynamicSerDe.class.getName())) {
         throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
       }
-      tbl.getTTable().getSd().setCols(alterTbl.getNewCols());
+      sd.setCols(alterTbl.getNewCols());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
       tbl.getTTable().getParameters().putAll(alterTbl.getProps());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) {
@@ -3623,28 +3624,16 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         tbl.getTTable().getParameters().remove(keyItr.next());
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDEPROPS) {
-      if (part != null) {
-        part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
-            alterTbl.getProps());
-      } else {
-        tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
-            alterTbl.getProps());
-      }
+      sd.getSerdeInfo().getParameters().putAll(alterTbl.getProps());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
       String serdeName = alterTbl.getSerdeName();
+      sd.getSerdeInfo().setSerializationLib(serdeName);
+      if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
+        sd.getSerdeInfo().getParameters().putAll(alterTbl.getProps());
+      }
       if (part != null) {
-        part.getTPartition().getSd().getSerdeInfo().setSerializationLib(serdeName);
-        if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
-          part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
-              alterTbl.getProps());
-        }
         part.getTPartition().getSd().setCols(part.getTPartition().getSd().getCols());
       } else {
-        tbl.setSerializationLib(alterTbl.getSerdeName());
-        if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
-          tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
-              alterTbl.getProps());
-        }
         if (!conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname)
             .contains(serdeName)) {
           tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl.
@@ -3652,19 +3641,10 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         }
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDFILEFORMAT) {
-      if(part != null) {
-        part.getTPartition().getSd().setInputFormat(alterTbl.getInputFormat());
-        part.getTPartition().getSd().setOutputFormat(alterTbl.getOutputFormat());
-        if (alterTbl.getSerdeName() != null) {
-          part.getTPartition().getSd().getSerdeInfo().setSerializationLib(
-              alterTbl.getSerdeName());
-        }
-      } else {
-        tbl.getTTable().getSd().setInputFormat(alterTbl.getInputFormat());
-        tbl.getTTable().getSd().setOutputFormat(alterTbl.getOutputFormat());
-        if (alterTbl.getSerdeName() != null) {
-          tbl.setSerializationLib(alterTbl.getSerdeName());
-        }
+      sd.setInputFormat(alterTbl.getInputFormat());
+      sd.setOutputFormat(alterTbl.getOutputFormat());
+      if (alterTbl.getSerdeName() != null) {
+        sd.getSerdeInfo().setSerializationLib(alterTbl.getSerdeName());
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
       boolean protectModeEnable = alterTbl.isProtectModeEnable();
@@ -3694,8 +3674,6 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
           .getColumnNamesFromSortCols(alterTbl.getSortColumns()));
     }
 
-    StorageDescriptor sd = part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd();
-
     if (alterTbl.isTurnOffSorting()) {
       sd.setSortCols(new ArrayList());
     } else if (alterTbl.getNumberBuckets() == -1) {
@@ -3716,11 +3694,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
           || locUri.getScheme().trim().equals("")) {
         throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation);
       }
-      if (part != null) {
-        part.setLocation(newLocation);
-      } else {
-        tbl.setDataLocation(new Path(locUri));
-      }
+      sd.setLocation(newLocation);
     } catch (URISyntaxException e) {
       throw new HiveException(e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index ed31f6d..147e494 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -276,7 +276,14 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         analyzeAlterTableClusterSort(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_COMPACT) {
         analyzeAlterTableCompact(ast, tableName, partSpec);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
+        analyzeAlterTableModifyCols(tableName, ast, partSpec, AlterTableTypes.ADDCOLS);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
+        analyzeAlterTableModifyCols(tableName, ast, partSpec, AlterTableTypes.REPLACECOLS);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
+        analyzeAlterTableRenameCol(tableName, ast, partSpec);
       }
+      break;
     }
 
     case HiveParser.TOK_DROPTABLE:
@@ -383,15 +390,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
       analyzeAlterTableArchive(ast, true);
       break;
-    case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS);
-      break;
-    case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS);
-      break;
-    case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      analyzeAlterTableRenameCol(ast);
-      break;
     case HiveParser.TOK_ALTERTABLE_ADDPARTS:
       analyzeAlterTableAddParts(ast, false);
       break;
@@ -2458,19 +2456,19 @@ private void analyzeAlterTableRename(ASTNode ast, boolean expectView) throws Sem
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
+  private void analyzeAlterTableRenameCol(String tblName, ASTNode ast,
+      HashMap partSpec) throws SemanticException {
     String newComment = null;
     String newType = null;
-    newType = getTypeStringFromAST((ASTNode) ast.getChild(3));
+    newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
     boolean first = false;
     String flagCol = null;
     ASTNode positionNode = null;
-    if (ast.getChildCount() == 6) {
-      newComment = unescapeSQLString(ast.getChild(4).getText());
-      positionNode = (ASTNode) ast.getChild(5);
-    } else if (ast.getChildCount() == 5) {
-      if (ast.getChild(4).getType() == HiveParser.StringLiteral) {
+    if (ast.getChildCount() == 5) {
+      newComment = unescapeSQLString(ast.getChild(3).getText());
+      positionNode = (ASTNode) ast.getChild(4);
+    } else if (ast.getChildCount() == 4) {
+      if (ast.getChild(3).getType() == HiveParser.StringLiteral) {
-        newComment = unescapeSQLString(ast.getChild(4).getText());
+        newComment = unescapeSQLString(ast.getChild(3).getText());
       } else {
-        positionNode = (ASTNode) ast.getChild(4);
+        positionNode = (ASTNode) ast.getChild(3);
       }
@@ -2485,8 +2483,8 @@ private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
       }
     }
 
-    String oldColName = ast.getChild(1).getText();
-    String newColName = ast.getChild(2).getText();
+    String oldColName = ast.getChild(0).getText();
+    String newColName = ast.getChild(1).getText();
 
     /* Validate the operation of renaming a column name. */
     Table tab = getTable(tblName);
@@ -2499,10 +2497,10 @@ private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
           + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
     }
 
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec,
         unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType,
         newComment, first, flagCol);
-    addInputsOutputsAlterTable(tblName, null, alterTblDesc);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
@@ -2544,14 +2542,13 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName,
         alterBucketNum), conf));
   }
 
-  private void analyzeAlterTableModifyCols(ASTNode ast,
-      AlterTableTypes alterType) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
-    List newCols = getColumns((ASTNode) ast.getChild(1));
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
+  private void analyzeAlterTableModifyCols(String tblName, ASTNode ast,
+      HashMap partSpec, AlterTableTypes alterType) throws SemanticException {
+    List newCols = getColumns((ASTNode) ast.getChild(0));
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols,
         alterType);
-    addInputsOutputsAlterTable(tblName, null, alterTblDesc);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 13bbf0a..e59ab83 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -927,8 +927,6 @@ alterTableStatementSuffix
 @init { pushMsg("alter table statement", state); }
 @after { popMsg(state); }
     : alterStatementSuffixRename
-    | alterStatementSuffixAddCol
-    | alterStatementSuffixRenameCol
     | alterStatementSuffixDropPartitions
     | alterStatementSuffixAddPartitions
     | alterStatementSuffixTouch
@@ -1009,16 +1007,16 @@ alterStatementSuffixRename
 alterStatementSuffixAddCol
 @init { pushMsg("add column statement", state); }
 @after { popMsg(state); }
-    : identifier (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS identifier columnNameTypeList)
-    -> ^(TOK_ALTERTABLE_REPLACECOLS identifier columnNameTypeList)
+    : (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
+    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS columnNameTypeList)
+    -> ^(TOK_ALTERTABLE_REPLACECOLS columnNameTypeList)
     ;
 
 alterStatementSuffixRenameCol
 @init { pushMsg("rename column name", state); }
 @after { popMsg(state); }
-    : identifier KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
-    ->^(TOK_ALTERTABLE_RENAMECOL identifier $oldName $newName colType $comment? alterStatementChangeColPosition?)
+    : KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
+    ->^(TOK_ALTERTABLE_RENAMECOL $oldName $newName colType $comment? alterStatementChangeColPosition?)
     ;
 
 alterStatementChangeColPosition
@@ -1127,6 +1125,8 @@ alterTblPartitionStatementSuffix
   | alterTblPartitionStatementSuffixSkewedLocation
   | alterStatementSuffixClusterbySortby
   | alterStatementSuffixCompact
+  | alterStatementSuffixRenameCol
+  | alterStatementSuffixAddCol
   ;
 
 alterStatementSuffixFileFormat
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index f6b70d8..316dfbb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -143,6 +143,15 @@
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT,
         new HiveOperation[] {HiveOperation.ALTERTABLE_CLUSTER_SORT,
             HiveOperation.ALTERTABLE_CLUSTER_SORT});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_ADDCOLS,
+            HiveOperation.ALTERTABLE_ADDCOLS});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_REPLACECOLS,
+            HiveOperation.ALTERTABLE_REPLACECOLS});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_RENAMECOL,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_RENAMECOL,
+            HiveOperation.ALTERTABLE_RENAMECOL});
   }
 
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index 20d863b..70a8439 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -102,10 +102,12 @@ public AlterTableDesc() {
    * @param newComment
    * @param newType
    */
-  public AlterTableDesc(String tblName, String oldColName, String newColName,
+  public AlterTableDesc(String tblName, HashMap partSpec,
+      String oldColName, String newColName,
       String newType, String newComment, boolean first, String afterCol) {
     super();
     oldName = tblName;
+    this.partSpec = partSpec;
     this.oldColName = oldColName;
     this.newColName = newColName;
     newColType = newType;
@@ -134,11 +136,12 @@ public AlterTableDesc(String oldName, String newName, boolean expectView) {
    * @param newCols
    *          new columns to be added
    */
-  public AlterTableDesc(String name, List newCols,
+  public AlterTableDesc(String name, HashMap partSpec, List newCols,
       AlterTableTypes alterType) {
     op = alterType;
     oldName = name;
     this.newCols = new ArrayList(newCols);
+    this.partSpec = partSpec;
   }
 
   /**
diff --git ql/src/test/queries/clientnegative/alter_partition_change_col_dup_col.q ql/src/test/queries/clientnegative/alter_partition_change_col_dup_col.q
new file mode 100644
index 0000000..8f5a884
--- /dev/null
+++ ql/src/test/queries/clientnegative/alter_partition_change_col_dup_col.q
@@ -0,0 +1,4 @@
+create table alter_partition_change_col_dup_col (c1 string, c2 decimal(10,0)) partitioned by (p1 string);
+alter table alter_partition_change_col_dup_col add partition (p1='abc');
+-- should fail because of duplicate name c1
+alter table alter_partition_change_col_dup_col change c2 c1 decimal(14,4);
diff --git ql/src/test/queries/clientnegative/alter_partition_change_col_nonexist.q
new file mode 100644
index 0000000..97348d9
--- /dev/null
+++ ql/src/test/queries/clientnegative/alter_partition_change_col_nonexist.q
@@ -0,0 +1,5 @@
+create table alter_partition_change_col_nonexist (c1 string, c2 decimal(10,0)) partitioned by (p1 string);
+alter table alter_partition_change_col_nonexist add partition (p1='abc');
+-- should fail because of nonexistent column c3
+alter table alter_partition_change_col_nonexist change c3 c4 decimal(14,4);
+
diff --git ql/src/test/queries/clientpositive/alter_partition_change_col.q ql/src/test/queries/clientpositive/alter_partition_change_col.q
new file mode 100644
index 0000000..64aafd1
--- /dev/null
+++ ql/src/test/queries/clientpositive/alter_partition_change_col.q
@@ -0,0 +1,57 @@
+SET hive.exec.dynamic.partition = true;
+SET hive.exec.dynamic.partition.mode = nonstrict;
+
+create table alter_partition_change_col0 (c1 string, c2 string);
+load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col0;
+
+create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string);
+
+insert overwrite table alter_partition_change_col1 partition (p1)
+  select c1, c2, 'abc' from alter_partition_change_col0
+  union all
+  select c1, c2, null from alter_partition_change_col0;
+
+show partitions alter_partition_change_col1;
+select * from alter_partition_change_col1;
+
+-- Change c2 to decimal(10,0)
+alter table alter_partition_change_col1 change c2 c2 decimal(10,0);
+alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(10,0);
+alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(10,0);
+select * from alter_partition_change_col1;
+
+-- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not.
+alter table alter_partition_change_col1 change c2 c2 decimal(14,4);
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1;
+
+-- now change the column type of the existing partition
+alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4);
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1;
+
+-- change column for default partition value
+alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(14,4);
+describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__');
+select * from alter_partition_change_col1;
+
+-- Try out replace columns
+alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string);
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1;
+alter table alter_partition_change_col1 replace columns (c1 string);
+describe alter_partition_change_col1;
+select * from alter_partition_change_col1;
+
+-- Try add columns
+alter table alter_partition_change_col1 add columns (c2 decimal(14,4));
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1;
+
+alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(14,4));
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1;
+
diff --git ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out
new file mode 100644
index 0000000..d2c252f
--- /dev/null
+++ ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out
@@ -0,0 +1,21 @@
+PREHOOK: query: create table alter_partition_change_col_dup_col (c1 string, c2 decimal(10,0)) partitioned by (p1 string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_partition_change_col_dup_col
+POSTHOOK: query: create table alter_partition_change_col_dup_col (c1 string, c2 decimal(10,0)) partitioned by (p1 string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alter_partition_change_col_dup_col
+PREHOOK: query: alter table alter_partition_change_col_dup_col add partition (p1='abc')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@alter_partition_change_col_dup_col
+POSTHOOK: query: alter table alter_partition_change_col_dup_col add partition (p1='abc')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter_partition_change_col_dup_col
+POSTHOOK: Output: default@alter_partition_change_col_dup_col@p1=abc
+PREHOOK: query: -- should fail because of duplicate name c1
+alter table alter_partition_change_col_dup_col change c2 c1 decimal(14,4)
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@alter_partition_change_col_dup_col
+PREHOOK: Output: default@alter_partition_change_col_dup_col
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask.
Duplicate column name: c1 diff --git ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out new file mode 100644 index 0000000..8e4422e --- /dev/null +++ ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out @@ -0,0 +1,21 @@ +PREHOOK: query: create table alter_partition_change_col_nonexist (c1 string, c2 decimal(10,0)) partitioned by (p1 string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@alter_partition_change_col_nonexist +POSTHOOK: query: create table alter_partition_change_col_nonexist (c1 string, c2 decimal(10,0)) partitioned by (p1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@alter_partition_change_col_nonexist +PREHOOK: query: alter table alter_partition_change_col_nonexist add partition (p1='abc') +PREHOOK: type: ALTERTABLE_ADDPARTS +PREHOOK: Output: default@alter_partition_change_col_nonexist +POSTHOOK: query: alter table alter_partition_change_col_nonexist add partition (p1='abc') +POSTHOOK: type: ALTERTABLE_ADDPARTS +POSTHOOK: Output: default@alter_partition_change_col_nonexist +POSTHOOK: Output: default@alter_partition_change_col_nonexist@p1=abc +PREHOOK: query: -- should fail because of nonexistent column c3 +alter table alter_partition_change_col_nonexist change c3 c4 decimal(14,4) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col_nonexist +PREHOOK: Output: default@alter_partition_change_col_nonexist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid column reference c3 diff --git ql/src/test/results/clientpositive/alter_partition_change_col.q.out ql/src/test/results/clientpositive/alter_partition_change_col.q.out new file mode 100644 index 0000000..e48464c --- /dev/null +++ ql/src/test/results/clientpositive/alter_partition_change_col.q.out @@ -0,0 +1,574 @@ +PREHOOK: query: create table alter_partition_change_col0 (c1 string, c2 string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@alter_partition_change_col0 +POSTHOOK: query: create table alter_partition_change_col0 (c1 string, c2 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@alter_partition_change_col0 +PREHOOK: query: load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col0 +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@alter_partition_change_col0 +POSTHOOK: query: load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col0 +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@alter_partition_change_col0 +PREHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@alter_partition_change_col1 +PREHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1) + select c1, c2, 'abc' from alter_partition_change_col0 + union all + select c1, c2, null from alter_partition_change_col0 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@alter_partition_change_col0 +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1) + select c1, c2, 'abc' from alter_partition_change_col0 + union all + select c1, c2, null from alter_partition_change_col0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col0 +POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Output: default@alter_partition_change_col1@p1=abc +POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ] +POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ] +POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ] +POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ] +PREHOOK: query: show partitions alter_partition_change_col1 +PREHOOK: type: SHOWPARTITIONS +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: show partitions alter_partition_change_col1 +POSTHOOK: type: SHOWPARTITIONS +POSTHOOK: Input: default@alter_partition_change_col1 +p1=__HIVE_DEFAULT_PARTITION__ +p1=abc +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 234.79 __HIVE_DEFAULT_PARTITION__ +Cluck 5.96 __HIVE_DEFAULT_PARTITION__ +Tom 19.00 __HIVE_DEFAULT_PARTITION__ +Mary 4.329 __HIVE_DEFAULT_PARTITION__ +Beck 0.0 __HIVE_DEFAULT_PARTITION__ +Snow 55.71 __HIVE_DEFAULT_PARTITION__ +Mary 33.33 __HIVE_DEFAULT_PARTITION__ +Beck 77.341 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 __HIVE_DEFAULT_PARTITION__ +Tom -12.25 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 abc +Beck 0.0 abc +Tom 19.00 abc +Mary 33.33 abc +Tom -12.25 abc +Mary 4.329 abc +Snow 55.71 abc +Beck 77.341 abc +Tom 234.79 abc +Cluck 5.96 abc +PREHOOK: query: -- Change c2 to decimal(10,0) +alter table alter_partition_change_col1 change c2 c2 decimal(10,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: -- 
Change c2 to decimal(10,0) +alter table alter_partition_change_col1 change c2 c2 decimal(10,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Output: default@alter_partition_change_col1 +PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(10,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=abc +POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(10,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +POSTHOOK: Output: default@alter_partition_change_col1@p1=abc +PREHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(10,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(10,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 235 __HIVE_DEFAULT_PARTITION__ +Cluck 6 __HIVE_DEFAULT_PARTITION__ +Tom 19 __HIVE_DEFAULT_PARTITION__ +Mary 4 __HIVE_DEFAULT_PARTITION__ +Beck 0 __HIVE_DEFAULT_PARTITION__ +Snow 56 __HIVE_DEFAULT_PARTITION__ +Mary 33 __HIVE_DEFAULT_PARTITION__ +Beck 77 __HIVE_DEFAULT_PARTITION__ +Beck 80 __HIVE_DEFAULT_PARTITION__ +Tom -12 __HIVE_DEFAULT_PARTITION__ +Beck 80 abc +Beck 0 abc +Tom 19 abc +Mary 33 abc +Tom -12 abc +Mary 4 abc +Snow 56 abc +Beck 77 abc +Tom 235 abc +Cluck 6 abc +PREHOOK: query: -- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not. +alter table alter_partition_change_col1 change c2 c2 decimal(14,4) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: -- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not. 
+alter table alter_partition_change_col1 change c2 c2 decimal(14,4) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Output: default@alter_partition_change_col1 +PREHOOK: query: describe alter_partition_change_col1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(10,0) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 235 __HIVE_DEFAULT_PARTITION__ +Cluck 6 __HIVE_DEFAULT_PARTITION__ +Tom 19 __HIVE_DEFAULT_PARTITION__ +Mary 4 __HIVE_DEFAULT_PARTITION__ +Beck 0 __HIVE_DEFAULT_PARTITION__ +Snow 56 __HIVE_DEFAULT_PARTITION__ +Mary 33 __HIVE_DEFAULT_PARTITION__ +Beck 77 __HIVE_DEFAULT_PARTITION__ +Beck 80 __HIVE_DEFAULT_PARTITION__ +Tom -12 __HIVE_DEFAULT_PARTITION__ +Beck 80 abc +Beck 0 abc +Tom 19 abc +Mary 33 abc +Tom -12 abc +Mary 4 abc +Snow 56 abc +Beck 77 abc +Tom 235 abc +Cluck 6 abc +PREHOOK: query: -- now change the column type of the existing partition +alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=abc +POSTHOOK: query: -- now change the column type of the existing partition +alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +POSTHOOK: Output: default@alter_partition_change_col1@p1=abc +PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here 
#### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 235 __HIVE_DEFAULT_PARTITION__ +Cluck 6 __HIVE_DEFAULT_PARTITION__ +Tom 19 __HIVE_DEFAULT_PARTITION__ +Mary 4 __HIVE_DEFAULT_PARTITION__ +Beck 0 __HIVE_DEFAULT_PARTITION__ +Snow 56 __HIVE_DEFAULT_PARTITION__ +Mary 33 __HIVE_DEFAULT_PARTITION__ +Beck 77 __HIVE_DEFAULT_PARTITION__ +Beck 80 __HIVE_DEFAULT_PARTITION__ +Tom -12 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 abc +Beck 0.0 abc +Tom 19.00 abc +Mary 33.33 abc +Tom -12.25 abc +Mary 4.329 abc +Snow 55.71 abc +Beck 77.341 abc +Tom 234.79 abc +Cluck 5.96 abc +PREHOOK: query: -- change column for default partition value +alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(14,4) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: query: -- change column for default partition value +alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(14,4) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 234.79 __HIVE_DEFAULT_PARTITION__ +Cluck 5.96 __HIVE_DEFAULT_PARTITION__ +Tom 19.00 __HIVE_DEFAULT_PARTITION__ +Mary 4.329 __HIVE_DEFAULT_PARTITION__ +Beck 0.0 __HIVE_DEFAULT_PARTITION__ +Snow 55.71 __HIVE_DEFAULT_PARTITION__ +Mary 33.33 __HIVE_DEFAULT_PARTITION__ +Beck 77.341 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 __HIVE_DEFAULT_PARTITION__ +Tom -12.25 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 abc +Beck 0.0 abc +Tom 19.00 abc +Mary 33.33 abc +Tom -12.25 abc +Mary 4.329 abc +Snow 55.71 abc +Beck 77.341 abc +Tom 234.79 abc +Cluck 5.96 abc +PREHOOK: query: -- Try out replace columns +alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=abc 
+POSTHOOK: query: -- Try out replace columns +alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +POSTHOOK: Output: default@alter_partition_change_col1@p1=abc +PREHOOK: query: describe alter_partition_change_col1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 234.79 __HIVE_DEFAULT_PARTITION__ +Cluck 5.96 __HIVE_DEFAULT_PARTITION__ +Tom 19.00 __HIVE_DEFAULT_PARTITION__ +Mary 4.329 __HIVE_DEFAULT_PARTITION__ +Beck 0.0 __HIVE_DEFAULT_PARTITION__ +Snow 55.71 __HIVE_DEFAULT_PARTITION__ +Mary 33.33 __HIVE_DEFAULT_PARTITION__ +Beck 77.341 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 __HIVE_DEFAULT_PARTITION__ +Tom -12.25 __HIVE_DEFAULT_PARTITION__ +Beck NULL abc +Beck NULL abc +Tom NULL abc +Mary NULL abc +Tom NULL abc +Mary NULL abc +Snow NULL abc +Beck NULL abc +Tom NULL abc +Cluck NULL abc +PREHOOK: query: alter table alter_partition_change_col1 replace columns (c1 string) +PREHOOK: type: ALTERTABLE_REPLACECOLS +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: alter table alter_partition_change_col1 replace columns (c1 string) +POSTHOOK: type: ALTERTABLE_REPLACECOLS +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Output: default@alter_partition_change_col1 +PREHOOK: query: describe alter_partition_change_col1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom __HIVE_DEFAULT_PARTITION__ +Cluck __HIVE_DEFAULT_PARTITION__ +Tom __HIVE_DEFAULT_PARTITION__ +Mary __HIVE_DEFAULT_PARTITION__ +Beck __HIVE_DEFAULT_PARTITION__ +Snow __HIVE_DEFAULT_PARTITION__ +Mary __HIVE_DEFAULT_PARTITION__ +Beck __HIVE_DEFAULT_PARTITION__ +Beck __HIVE_DEFAULT_PARTITION__ +Tom __HIVE_DEFAULT_PARTITION__ +Beck abc +Beck abc +Tom abc +Mary abc +Tom abc +Mary abc +Snow abc +Beck abc +Tom abc +Cluck abc +PREHOOK: query: -- Try add columns +alter table alter_partition_change_col1 add columns (c2 decimal(14,4)) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1 +POSTHOOK: query: -- Try add columns +alter table alter_partition_change_col1 add columns (c2 decimal(14,4)) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Output: default@alter_partition_change_col1 +PREHOOK: query: describe alter_partition_change_col1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 234.79 __HIVE_DEFAULT_PARTITION__ +Cluck 5.96 __HIVE_DEFAULT_PARTITION__ +Tom 19.00 __HIVE_DEFAULT_PARTITION__ +Mary 4.329 __HIVE_DEFAULT_PARTITION__ +Beck 0.0 __HIVE_DEFAULT_PARTITION__ +Snow 55.71 __HIVE_DEFAULT_PARTITION__ +Mary 33.33 __HIVE_DEFAULT_PARTITION__ +Beck 77.341 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 __HIVE_DEFAULT_PARTITION__ +Tom -12.25 __HIVE_DEFAULT_PARTITION__ +Beck NULL abc +Beck NULL abc +Tom NULL abc +Mary NULL abc +Tom NULL abc +Mary NULL abc +Snow NULL abc +Beck NULL abc +Tom NULL abc +Cluck NULL abc +PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(14,4)) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Output: default@alter_partition_change_col1@p1=abc +POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(14,4)) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: 
default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +POSTHOOK: Output: default@alter_partition_change_col1@p1=abc +PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@alter_partition_change_col1 +c1 string +c2 decimal(14,4) +p1 string + +# Partition Information +# col_name data_type comment + +p1 string +PREHOOK: query: select * from alter_partition_change_col1 +PREHOOK: type: QUERY +PREHOOK: Input: default@alter_partition_change_col1 +PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +PREHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +POSTHOOK: query: select * from alter_partition_change_col1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alter_partition_change_col1 +POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__ +POSTHOOK: Input: default@alter_partition_change_col1@p1=abc +#### A masked pattern was here #### +Tom 234.79 __HIVE_DEFAULT_PARTITION__ +Cluck 5.96 __HIVE_DEFAULT_PARTITION__ +Tom 19.00 __HIVE_DEFAULT_PARTITION__ +Mary 4.329 __HIVE_DEFAULT_PARTITION__ +Beck 0.0 __HIVE_DEFAULT_PARTITION__ +Snow 55.71 __HIVE_DEFAULT_PARTITION__ +Mary 33.33 __HIVE_DEFAULT_PARTITION__ +Beck 77.341 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 __HIVE_DEFAULT_PARTITION__ +Tom -12.25 __HIVE_DEFAULT_PARTITION__ +Beck 79.9 abc +Beck 0.0 abc +Tom 19.00 abc +Mary 33.33 abc +Tom -12.25 abc +Mary 4.329 abc +Snow 55.71 abc +Beck 77.341 abc +Tom 234.79 abc +Cluck 5.96 abc
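
A minimal sketch (not part of the patch) of the pattern the DDLTask.java changes above converge on: resolve the StorageDescriptor once, from either the table or the partition being altered, and apply column and serde updates through it, so the new partition-level ALTER TABLE ... CHANGE/ADD/REPLACE COLUMNS paths share the table-level code. The helper class below is hypothetical; the getter and setter calls are the ones used in the diff.

import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

final class AlterTargetSketch {

  // A table-level ALTER passes part == null; a partition-level ALTER passes the
  // resolved Partition. Either way the columns and serde live on the returned
  // StorageDescriptor.
  static StorageDescriptor resolveSd(Table tbl, Partition part) {
    return part == null ? tbl.getTTable().getSd() : part.getTPartition().getSd();
  }

  // REPLACE COLUMNS applied against whichever object was selected above.
  static void replaceColumns(Table tbl, Partition part, List<FieldSchema> newCols) {
    StorageDescriptor sd = resolveSd(tbl, part);
    sd.setCols(newCols);
  }

  // SET SERDE applied against the same descriptor.
  static void setSerde(Table tbl, Partition part, String serdeName) {
    resolveSd(tbl, part).getSerdeInfo().setSerializationLib(serdeName);
  }
}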