diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index fad5ed3..24f829f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1279,7 +1279,7 @@ private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionD
     assert(tbl.isPartitioned());
     List<FieldSchema> newPartitionKeys = new ArrayList<FieldSchema>();
-    
+
     //Check if the existing partition values can be type casted to the new column type
     // with a non null value before trying to alter the partition column type.
     try {
@@ -1291,19 +1291,19 @@ private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionD
           break;
         }
       }
-      
+
       if (colIndex == -1 || colIndex == tbl.getTTable().getPartitionKeys().size()) {
-        throw new HiveException("Cannot find partition column " + 
+        throw new HiveException("Cannot find partition column " +
            alterPartitionDesc.getPartKeySpec().getName());
       }
-      
+
       TypeInfo expectedType =
          TypeInfoUtils.getTypeInfoFromTypeString(alterPartitionDesc.getPartKeySpec().getType());
       ObjectInspector outputOI =
          TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
       Converter converter = ObjectInspectorConverters.getConverter(
-          PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI); 
-      
+          PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
+
       // For all the existing partitions, check if the value can be type casted to a non-null object
       for(Partition part : partitions) {
         if (part.getName().equals(conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME))) {
@@ -1312,23 +1312,23 @@ private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionD
         try {
           String value = part.getValues().get(colIndex);
           Object convertedValue =
-              converter.convert(value); 
+              converter.convert(value);
           if (convertedValue == null) {
             throw new HiveException(" Converting from " + TypeInfoFactory.stringTypeInfo + " to " +
                 expectedType + " for value : " + value + " resulted in NULL object");
           }
         } catch (Exception e) {
-          throw new HiveException("Exception while converting " + 
+          throw new HiveException("Exception while converting " +
              TypeInfoFactory.stringTypeInfo + " to " + expectedType + " for value : " +
              part.getValues().get(colIndex));
-        } 
+        }
       }
     } catch(Exception e) {
       throw new HiveException(
           "Exception while checking type conversion of existing partition values to " +
           alterPartitionDesc.getPartKeySpec() + " : " + e.getMessage());
     }
-    
+
     for(FieldSchema col : tbl.getTTable().getPartitionKeys()) {
       if (col.getName().compareTo(alterPartitionDesc.getPartKeySpec().getName()) == 0) {
         newPartitionKeys.add(alterPartitionDesc.getPartKeySpec());
@@ -3439,8 +3439,13 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
         String[] dbTab = splitTableName(tableName);
         List<String> colNames = new ArrayList<String>();
         colNames.add(colName.toLowerCase());
-        colStats = db.getTableColumnStatistics(dbTab[0].toLowerCase(),
-            dbTab[1].toLowerCase(), colNames);
+        if (null == part) {
+          colStats = db.getTableColumnStatistics(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames);
+        } else {
+          List<String> partitions = new ArrayList<String>();
+          partitions.add(part.getName());
+          colStats = db.getPartitionColumnStatistics(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), partitions, colNames).get(part.getName());
+        }
       }
     }
@@ -3701,7 +3706,7 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
           MetadataTypedColumnsetSerDe.class.getName())
           && !tbl.getSerializationLib().equals(LazySimpleSerDe.class.getName())
           && !tbl.getSerializationLib().equals(ColumnarSerDe.class.getName())
-          && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())) {
+          && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())
+          && !tbl.getSerializationLib().equals(ParquetHiveSerDe.class.getName())) {
         throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
       }
diff --git ql/src/test/queries/clientpositive/columnstats_partlvl.q ql/src/test/queries/clientpositive/columnstats_partlvl.q
index 8bf6c70..99b859c 100644
--- ql/src/test/queries/clientpositive/columnstats_partlvl.q
+++ ql/src/test/queries/clientpositive/columnstats_partlvl.q
@@ -23,3 +23,5 @@
 explain
 analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns;
 analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns;
+describe formatted Employee_Part.employeeID partition (employeeSalary=2000.0);
+describe formatted Employee_Part.employeeName partition (employeeSalary=2000.0);
diff --git ql/src/test/results/clientpositive/columnstats_partlvl.q.out ql/src/test/results/clientpositive/columnstats_partlvl.q.out
index a4c4677..6128770 100644
--- ql/src/test/results/clientpositive/columnstats_partlvl.q.out
+++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out
@@ -474,3 +474,21 @@
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@employee_part
 POSTHOOK: Input: default@employee_part@employeesalary=2000.0
 #### A masked pattern was here ####
+PREHOOK: query: describe formatted Employee_Part.employeeID partition (employeeSalary=2000.0)
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@employee_part
+POSTHOOK: query: describe formatted Employee_Part.employeeID partition (employeeSalary=2000.0)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@employee_part
+# col_name            data_type   min   max   num_nulls   distinct_count   avg_col_len          max_col_len   num_trues   num_falses   comment
+
+employeeID            int         16    34    1           14                                                                           from deserializer
+PREHOOK: query: describe formatted Employee_Part.employeeName partition (employeeSalary=2000.0)
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@employee_part
+POSTHOOK: query: describe formatted Employee_Part.employeeName partition (employeeSalary=2000.0)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@employee_part
+# col_name            data_type   min   max   num_nulls   distinct_count   avg_col_len          max_col_len   num_trues   num_falses   comment
+
+employeeName          string                  1           9                4.3076923076923075   6                                      from deserializer
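Context on the change: the alterTableAlterPart hunks validate, before a partition column's type is altered, that every existing partition value (always stored as a string in the metastore) still converts to a non-null value of the target type, and the describeTable hunk routes DESCRIBE FORMATTED of a column to Hive.getPartitionColumnStatistics when a partition spec is supplied, as exercised by the new columnstats_partlvl.q queries. Below is a minimal standalone sketch of the conversion check, assuming only the serde2 APIs the patch itself uses; the class and method names (PartitionValueCastCheck, castsCleanly) are illustrative and not part of the patch.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PartitionValueCastCheck {

  // A stored partition value is only safe to re-type when the string-to-target
  // converter yields a non-null object; null (or an exception) means the value
  // would silently degrade to NULL after the ALTER, so it must be rejected.
  static boolean castsCleanly(String value, String targetTypeName) {
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(targetTypeName);
    ObjectInspector outputOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
    Converter converter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
    try {
      return converter.convert(value) != null;
    } catch (Exception e) {
      return false;  // treat any conversion failure the same as a NULL result
    }
  }

  public static void main(String[] args) {
    System.out.println(castsCleanly("2000.0", "double"));  // true
    System.out.println(castsCleanly("abc", "int"));        // false: converts to NULL
  }
}

Hive's primitive converters generally return null (or throw NumberFormatException) when a string fails to parse as the target type, which is exactly the signal the patch turns into a HiveException before any partition metadata is touched.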