diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index 52af3af..a2876e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -99,16 +99,25 @@ public Object process(Node nd, Stack stack,
       Preconditions.checkArgument(alias != null,
           "AssertionError: expected alias to be not null");
       SparkWork sparkWork = context.currentTask.getWork();
-      boolean partialScan = parseContext.getQueryProperties().isPartialScanAnalyzeCommand();
-      boolean noScan = parseContext.getQueryProperties().isNoScanAnalyzeCommand();
-      if ((OrcInputFormat.class.isAssignableFrom(inputFormat) ||
-          MapredParquetInputFormat.class.isAssignableFrom(inputFormat)) && (noScan || partialScan)) {
+      if (OrcInputFormat.class.isAssignableFrom(inputFormat) ||
+          MapredParquetInputFormat.class.isAssignableFrom(inputFormat)) {
+        // For ORC & Parquet, all the following statements are the same
+        // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS
         // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS partialscan;
         // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS noscan;
         // There will not be any Spark job above this task
         StatsNoJobWork snjWork = new StatsNoJobWork(tableScan.getConf().getTableMetadata().getTableSpec());
         snjWork.setStatsReliable(parseContext.getConf().getBoolVar(
             HiveConf.ConfVars.HIVE_STATS_RELIABLE));
+        // If partition is specified, get pruned partition list
+        Set<Partition> confirmedParts = GenMapRedUtils.getConfirmedPartitionsForScan(tableScan);
+        if (confirmedParts.size() > 0) {
+          Table source = tableScan.getConf().getTableMetadata();
+          List<String> partCols = GenMapRedUtils.getPartitionColumns(tableScan);
+          PrunedPartitionList partList = new PrunedPartitionList(source, confirmedParts, partCols,
+              false);
+          snjWork.setPrunedPartitionList(partList);
+        }
         Task<StatsNoJobWork> snjTask = TaskFactory.get(snjWork, parseContext.getConf());
         snjTask.setParentTasks(null);
         context.rootTasks.remove(context.currentTask);