diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index b23129b..87e96a9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -72,6 +72,8 @@
   private static final LogHelper console = new LogHelper(LOG);
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final long serialVersionUID = 1L;
+  private transient String sparkJobID;
+  private transient SparkStatistics sparkStatistics;
 
   @Override
   public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
@@ -98,11 +100,12 @@ public int execute(DriverContext driverContext) {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_SUBMIT_JOB);
 
       addToHistory(jobRef);
+      sparkJobID = jobRef.getJobId();
       this.jobID = jobRef.getSparkJobStatus().getAppID();
       rc = jobRef.monitorJob();
       SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
       if (rc == 0) {
-        SparkStatistics sparkStatistics = sparkJobStatus.getSparkStatistics();
+        sparkStatistics = sparkJobStatus.getSparkStatistics();
         if (LOG.isInfoEnabled() && sparkStatistics != null) {
           LOG.info(String.format("=====Spark Job[%s] statistics=====", jobRef.getJobId()));
           logSparkStatistic(sparkStatistics);
@@ -228,6 +231,14 @@ public String getName() {
     return ((ReduceWork) children.get(0)).getReducer();
   }
 
+  public String getSparkJobID() {
+    return sparkJobID;
+  }
+
+  public SparkStatistics getSparkStatistics() {
+    return sparkStatistics;
+  }
+
   /**
    * Set the number of reducers for the spark work.
    */