diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index e42bbdd..30da286 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -3251,12 +3251,18 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) {
       List<FieldSchema> newCols = alterTbl.getNewCols();
       List<FieldSchema> oldCols = tbl.getCols();
-      if (tbl.getSerializationLib().equals(
+      StorageDescriptor sd = tbl.getTTable().getSd();
+      if (part != null) {
+        oldCols = part.getCols();
+        sd = part.getTPartition().getSd();
+      }
+      String serializationLib = sd.getSerdeInfo().getSerializationLib();
+      if (serializationLib.equals(
           "org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
         console
             .printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
-        tbl.setSerializationLib(LazySimpleSerDe.class.getName());
-        tbl.getTTable().getSd().setCols(newCols);
+        sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+        sd.setCols(newCols);
       } else {
         // make sure the columns does not already exist
         Iterator<FieldSchema> iterNewCols = newCols.iterator();
@@ -3272,10 +3278,14 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
           }
           oldCols.add(newCol);
         }
-        tbl.getTTable().getSd().setCols(oldCols);
+        sd.setCols(oldCols);
       }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAMECOLUMN) {
       List<FieldSchema> oldCols = tbl.getCols();
+      if (part != null) {
+        // For partition, the columns should come from the partition.
+        oldCols = part.getCols();
+      }
       List<FieldSchema> newCols = new ArrayList<FieldSchema>();
       Iterator<FieldSchema> iterOldCols = oldCols.iterator();
       String oldName = alterTbl.getOldColName();
@@ -3336,7 +3346,11 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
         newCols.add(position, column);
       }

-      tbl.getTTable().getSd().setCols(newCols);
+      if (part != null) {
+        part.getTPartition().getSd().setCols(newCols);
+      } else {
+        tbl.getTTable().getSd().setCols(newCols);
+      }

     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.REPLACECOLS) {
       // change SerDe to LazySimpleSerDe if it is columnsetSerDe
@@ -3353,7 +3367,11 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
           && !tbl.getSerializationLib().equals(ParquetHiveSerDe.class.getName())) {
         throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
       }
-      tbl.getTTable().getSd().setCols(alterTbl.getNewCols());
+      if (part != null) {
+        part.getTPartition().getSd().setCols(alterTbl.getNewCols());
+      } else {
+        tbl.getTTable().getSd().setCols(alterTbl.getNewCols());
+      }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
       tbl.getTTable().getParameters().putAll(alterTbl.getProps());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) {
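The DDLTask hunks above all make the same adjustment: when an ALTER TABLE ... PARTITION statement is being executed (part != null), the current column list and the StorageDescriptor are taken from, and written back to, the partition rather than the table. A minimal sketch of that selection, using the Table/Partition wrappers already referenced in the hunks (the helper name resolveSd is illustrative only; the patch inlines the check in each branch instead of factoring it out):

    import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;

    final class AlterColumnsTarget {
      // Pick the storage descriptor whose columns the ALTER statement should edit:
      // the partition's SD when a PARTITION clause was supplied, else the table's SD.
      static StorageDescriptor resolveSd(Table tbl, Partition part) {
        return (part != null) ? part.getTPartition().getSd() : tbl.getTTable().getSd();
      }
    }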
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 05cde3e..3ab179b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -267,11 +267,11 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) {
       analyzeAlterTableArchive(qualified, ast, true);
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
-      analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.ADDCOLS);
+      analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.ADDCOLS);
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
-      analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.REPLACECOLS);
+      analyzeAlterTableModifyCols(qualified, ast, partSpec, AlterTableTypes.REPLACECOLS);
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
-      analyzeAlterTableRenameCol(qualified, ast);
+      analyzeAlterTableRenameCol(qualified, ast, partSpec);
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
       analyzeAlterTableAddParts(qualified, ast, false);
     } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) {
@@ -2477,7 +2477,8 @@ private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expec
         alterTblDesc), conf));
   }

-  private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast) throws SemanticException {
+  private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast,
+      HashMap<String, String> partSpec) throws SemanticException {
     String newComment = null;
     String newType = null;
     newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
@@ -2518,10 +2519,10 @@ private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast) throws
     }

     String tblName = getDotName(qualified);
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec,
         unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType,
         newComment, first, flagCol);
-    addInputsOutputsAlterTable(tblName, null, alterTblDesc);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);

     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
@@ -2565,14 +2566,14 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName,
   }

   private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast,
-      AlterTableTypes alterType) throws SemanticException {
+      HashMap<String, String> partSpec, AlterTableTypes alterType) throws SemanticException {

     String tblName = getDotName(qualified);
     List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(0));
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, partSpec, newCols,
        alterType);
-    addInputsOutputsAlterTable(tblName, null, alterTblDesc);
+    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);

     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
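The analyzer methods above now receive the parsed PARTITION spec and pass it both into the AlterTableDesc and into addInputsOutputsAlterTable, so the altered partition, not just the table, is registered as a read/write entity; that is why the new .q.out below lists default@alter_partition_change_col1@p1=abc as a PREHOOK/POSTHOOK Output for the partition-level statements. As a hedged sketch, this is roughly the descriptor analyzeAlterTableRenameCol would now build for alter table t partition (p1='abc') change c2 c2 decimal(14,4); the table name and values are illustrative, and the argument order follows the new constructor shown in the AlterTableDesc hunk below:

    import java.util.HashMap;
    import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

    public class RenameColDescSketch {
      public static void main(String[] args) {
        HashMap<String, String> partSpec = new HashMap<String, String>();
        partSpec.put("p1", "abc");            // partition being altered
        AlterTableDesc desc = new AlterTableDesc("default.t", partSpec,
            "c2",                             // old column name
            "c2",                             // new column name (unchanged here)
            "decimal(14,4)",                  // new column type
            null,                             // no new comment
            false,                            // not FIRST
            null);                            // no AFTER column
      }
    }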
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 25cd3a5..b69ad6a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -945,8 +945,6 @@ alterTableStatementSuffix
 @init { pushMsg("alter table statement", state); }
 @after { popMsg(state); }
     : alterStatementSuffixRename[true]
-    | alterStatementSuffixAddCol
-    | alterStatementSuffixRenameCol
     | alterStatementSuffixUpdateStatsCol
     | alterStatementSuffixDropPartitions[true]
     | alterStatementSuffixAddPartitions[true]
@@ -974,6 +972,8 @@ alterTblPartitionStatementSuffix
   | alterStatementSuffixClusterbySortby
   | alterStatementSuffixCompact
   | alterStatementSuffixUpdateStatsCol
+  | alterStatementSuffixRenameCol
+  | alterStatementSuffixAddCol
   ;

 alterStatementPartitionKeyType
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index 8517319..298bbca 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -110,10 +110,12 @@ public AlterTableDesc() {
    * @param newComment
    * @param newType
    */
-  public AlterTableDesc(String tblName, String oldColName, String newColName,
+  public AlterTableDesc(String tblName, HashMap<String, String> partSpec,
+      String oldColName, String newColName,
       String newType, String newComment, boolean first, String afterCol) {
     super();
     oldName = tblName;
+    this.partSpec = partSpec;
     this.oldColName = oldColName;
     this.newColName = newColName;
     newColType = newType;
@@ -142,11 +144,12 @@ public AlterTableDesc(String oldName, String newName, boolean expectView) {
    * @param newCols
    *          new columns to be added
    */
-  public AlterTableDesc(String name, List<FieldSchema> newCols,
+  public AlterTableDesc(String name, HashMap<String, String> partSpec, List<FieldSchema> newCols,
       AlterTableTypes alterType) {
     op = alterType;
     oldName = name;
     this.newCols = new ArrayList<FieldSchema>(newCols);
+    this.partSpec = partSpec;
   }

   /**
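Two things happen above: the grammar moves alterStatementSuffixAddCol and alterStatementSuffixRenameCol out of alterTableStatementSuffix and into alterTblPartitionStatementSuffix, which is what lets ADD/REPLACE/CHANGE COLUMNS accept an optional PARTITION clause at all, and AlterTableDesc gains partSpec-aware constructors so the chosen partition travels with the plan. A small sketch of building an ADDCOLS descriptor the way analyzeAlterTableModifyCols now does; the values mirror the new test below, and constructing the descriptor directly like this is for illustration only:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
    import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;

    public class AddColsDescSketch {
      public static void main(String[] args) {
        HashMap<String, String> partSpec = new HashMap<String, String>();
        partSpec.put("p1", "abc");                                  // partition being altered
        List<FieldSchema> newCols = new ArrayList<FieldSchema>();
        newCols.add(new FieldSchema("c2", "decimal(10,1)", null));  // column to add
        AlterTableDesc addColsDesc = new AlterTableDesc(
            "default.alter_partition_change_col1", partSpec, newCols,
            AlterTableTypes.ADDCOLS);
      }
    }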
diff --git ql/src/test/queries/clientpositive/alter_partition_change_col.q ql/src/test/queries/clientpositive/alter_partition_change_col.q
new file mode 100644
index 0000000..8b2787e
--- /dev/null
+++ ql/src/test/queries/clientpositive/alter_partition_change_col.q
@@ -0,0 +1,33 @@
+
+create table alter_partition_change_col1 (c1 string, c2 decimal(10,0)) partitioned by (p1 string);
+alter table alter_partition_change_col1 add partition (p1='abc');
+load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1 where p1='abc';
+
+-- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not.
+alter table alter_partition_change_col1 change c2 c2 decimal(14,4);
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1 where p1='abc';
+
+-- now change the column type of the existing partition
+alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4);
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1 where p1='abc';
+
+-- Try out replace columns
+alter table alter_partition_change_col1 replace columns (c1 string);
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string);
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1 where p1='abc';
+
+-- Try add columns
+alter table alter_partition_change_col1 add columns (c2 decimal(10,1));
+describe alter_partition_change_col1;
+describe alter_partition_change_col1 partition (p1='abc');
+alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(10,1));
+describe alter_partition_change_col1 partition (p1='abc');
+select * from alter_partition_change_col1 where p1='abc';
+
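The expected output that follows shows why the partition-level schema matters at read time: the same data file is interpreted with the partition's declared column type, so while the partition is still decimal(10,0) a stored value such as 234.79 surfaces as 235, and only after the partition-level CHANGE does it surface with its fractional digits. A plain-JDK sketch of that precision/scale effect; the values are taken from the output below, HALF_UP rounding is an assumption used only to make the scale difference concrete, and Hive's display additionally drops trailing zeros:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class DecimalScaleSketch {
      public static void main(String[] args) {
        BigDecimal stored = new BigDecimal("234.79");                  // value in the loaded file
        // Partition still declared decimal(10,0): scale 0 is enforced on read.
        System.out.println(stored.setScale(0, RoundingMode.HALF_UP));  // prints 235
        // After ALTER ... PARTITION (p1='abc') CHANGE c2 c2 decimal(14,4):
        System.out.println(stored.setScale(4, RoundingMode.HALF_UP));  // prints 234.7900
      }
    }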
diff --git ql/src/test/results/clientpositive/alter_partition_change_col.q.out ql/src/test/results/clientpositive/alter_partition_change_col.q.out
new file mode 100644
index 0000000..dc1747b
--- /dev/null
+++ ql/src/test/results/clientpositive/alter_partition_change_col.q.out
@@ -0,0 +1,305 @@
+PREHOOK: query: create table alter_partition_change_col1 (c1 string, c2 decimal(10,0)) partitioned by (p1 string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: query: create table alter_partition_change_col1 (c1 string, c2 decimal(10,0)) partitioned by (p1 string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alter_partition_change_col1
+PREHOOK: query: alter table alter_partition_change_col1 add partition (p1='abc')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: query: alter table alter_partition_change_col1 add partition (p1='abc')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: query: load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+POSTHOOK: query: load data local inpath '../../data/files/dec.txt' overwrite into table alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+Tom 235 abc
+Beck 77 abc
+Snow 56 abc
+Mary 4 abc
+Cluck 6 abc
+Tom -12 abc
+Mary 33 abc
+Tom 19 abc
+Beck 0 abc
+Beck 80 abc
+PREHOOK: query: -- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not.
+alter table alter_partition_change_col1 change c2 c2 decimal(14,4)
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: query: -- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not.
+alter table alter_partition_change_col1 change c2 c2 decimal(14,4)
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Output: default@alter_partition_change_col1
+PREHOOK: query: describe alter_partition_change_col1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(14,4)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(10,0)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+Tom 235 abc
+Beck 77 abc
+Snow 56 abc
+Mary 4 abc
+Cluck 6 abc
+Tom -12 abc
+Mary 33 abc
+Tom 19 abc
+Beck 0 abc
+Beck 80 abc
+PREHOOK: query: -- now change the column type of the existing partition
+alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4)
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+POSTHOOK: query: -- now change the column type of the existing partition
+alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4)
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(14,4)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+Tom 234.79 abc
+Beck 77.341 abc
+Snow 55.71 abc
+Mary 4.329 abc
+Cluck 5.96 abc
+Tom -12.25 abc
+Mary 33.33 abc
+Tom 19.00 abc
+Beck 0.0 abc
+Beck 79.9 abc
+PREHOOK: query: -- Try out replace columns
+alter table alter_partition_change_col1 replace columns (c1 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: query: -- Try out replace columns
+alter table alter_partition_change_col1 replace columns (c1 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Output: default@alter_partition_change_col1
+PREHOOK: query: describe alter_partition_change_col1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(14,4)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+Tom abc
+Beck abc
+Snow abc
+Mary abc
+Cluck abc
+Tom abc
+Mary abc
+Tom abc
+Beck abc
+Beck abc
+PREHOOK: query: -- Try add columns
+alter table alter_partition_change_col1 add columns (c2 decimal(10,1))
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1
+POSTHOOK: query: -- Try add columns
+alter table alter_partition_change_col1 add columns (c2 decimal(10,1))
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Output: default@alter_partition_change_col1
+PREHOOK: query: describe alter_partition_change_col1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(10,1)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(10,1))
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(10,1))
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1 string
+c2 decimal(10,1)
+p1 string
+
+# Partition Information
+# col_name data_type comment
+
+p1 string
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+#### A masked pattern was here ####
+Tom 234.8 abc
+Beck 77.3 abc
+Snow 55.7 abc
+Mary 4.3 abc
+Cluck 6.0 abc
+Tom -12.3 abc
+Mary 33.3 abc
+Tom 19.0 abc
+Beck 0.0 abc
+Beck 79.9 abc