diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index b0f124b..06ae2e9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -78,6 +78,7 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; @@ -1764,8 +1765,7 @@ public boolean dropPartition(String db_name, String tbl_name, short limit) throws HiveException { if (!tbl.isPartitioned()) { - throw new HiveException("Partition spec should only be supplied for a " + - "partitioned table"); + throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName()); } List partialPvals = MetaStoreUtils.getPvals(tbl.getPartCols(), partialPartSpec); @@ -1818,8 +1818,7 @@ public boolean dropPartition(String db_name, String tbl_name, throws HiveException { if (!tbl.isPartitioned()) { - throw new HiveException("Partition spec should only be supplied for a " + - "partitioned table"); + throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName()); } List names = getPartitionNames(tbl.getDbName(), tbl.getTableName(), @@ -1843,8 +1842,7 @@ public boolean dropPartition(String db_name, String tbl_name, throws HiveException { if (!tbl.isPartitioned()) { - throw new HiveException("Partition spec should only be supplied for a " + - "partitioned table"); + throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName()); } List partitions = new ArrayList(partNames.size()); @@ -1895,8 +1893,7 @@ public boolean dropPartition(String db_name, String tbl_name, throws HiveException, MetaException, 
NoSuchObjectException, TException { if (!tbl.isPartitioned()) { - throw new HiveException("Partition spec should only be supplied for a " + - "partitioned table"); + throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName()); } List tParts = getMSC().listPartitionsByFilter( diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 037191a..24c47fc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -128,10 +128,10 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.TextInputFormat; +import org.apache.hadoop.util.StringUtils; /** * DDLSemanticAnalyzer. 
@@ -2932,7 +2932,9 @@ private void addTablePartsOutputs(String tblName, List> part try { parts = db.getPartitions(tab, partSpec); } catch (HiveException e) { - LOG.error("Got HiveException during obtaining list of partitions"); + LOG.error("Got HiveException during obtaining list of partitions: " + + StringUtils.stringifyException(e)); + throw new SemanticException(e.getMessage(), e); } } else { parts = new ArrayList(); @@ -2942,7 +2944,8 @@ private void addTablePartsOutputs(String tblName, List> part parts.add(p); } } catch (HiveException e) { - LOG.debug("Wrong specification"); + LOG.debug("Wrong specification: " + StringUtils.stringifyException(e)); + throw new SemanticException(e.getMessage(), e); } } if (parts.isEmpty()) { diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out index 384fcbe..d276230 100644 --- ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out +++ ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out @@ -23,8 +23,4 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alter_rename_partition_src POSTHOOK: Output: default@alter_rename_partition@pcol1=old_part1%3A/pcol2=old_part2%3A POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1:,pcol2=old_part2:).col1 SIMPLE [(alter_rename_partition_src)alter_rename_partition_src.FieldSchema(name:col1, type:string, comment:null), ] -PREHOOK: query: alter table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') rename to partition (pCol1='old_part1:', pcol2='old_part2:', pcol3='old_part3:') -PREHOOK: type: ALTERTABLE_RENAMEPART -PREHOOK: Input: default@alter_rename_partition -PREHOOK: Output: default@alter_rename_partition@pcol1=old_part1%3A/pcol2=old_part2%3A -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to rename partition. 
+FAILED: SemanticException table is partitioned but partition spec is not specified or does not fully match table partitioning: {pcol1=old_part1:, pcol2=old_part2:, pcol3=old_part3:} diff --git ql/src/test/results/clientnegative/touch2.q.out ql/src/test/results/clientnegative/touch2.q.out index 045121a..91d8283 100644 --- ql/src/test/results/clientnegative/touch2.q.out +++ ql/src/test/results/clientnegative/touch2.q.out @@ -1,4 +1 @@ -PREHOOK: query: ALTER TABLE src TOUCH PARTITION (ds='2008-04-08', hr='12') -PREHOOK: type: ALTERTABLE_TOUCH -PREHOOK: Input: default@src -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12} +FAILED: SemanticException table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12}