Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 1057083)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -38,6 +38,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -67,8 +68,8 @@
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
-import org.apache.hadoop.hive.ql.lockmgr.LockException;
 import org.apache.hadoop.hive.ql.metadata.DummyPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -1087,9 +1088,26 @@
 
   public int close() {
     try {
+      if (plan != null) {
+        FetchTask fetchTask = plan.getFetchTask();
+        if (null != fetchTask) {
+          try {
+            fetchTask.clearFetch();
+          } catch (Exception e) {
+            LOG.debug(" Exception while clearing the Fetch task ", e);
+          }
+        }
+      }
       if (ctx != null) {
         ctx.clear();
       }
+      if (null != resStream) {
+        try {
+          ((FSDataInputStream) resStream).close();
+        } catch (Exception e) {
+          LOG.debug(" Exception while closing the resStream ", e);
+        }
+      }
     } catch (Exception e) {
       console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java	(revision 1057083)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java	(working copy)
@@ -28,13 +28,14 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
+import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -163,11 +164,24 @@
   @Override
   protected void localizeMRTmpFilesImpl(Context ctx) {
     String s = work.getTblDir();
-    if ((s != null) && ctx.isMRTmpFileURI(s))
+    if ((s != null) && ctx.isMRTmpFileURI(s)) {
       work.setTblDir(ctx.localizeMRTmpFileURI(s));
+    }
 
     ArrayList ls = work.getPartDir();
-    if (ls != null)
+    if (ls != null) {
       ctx.localizePaths(ls);
+    }
   }
+
+  /**
+   * Clear the Fetch Operator.
+   *
+   * @throws HiveException
+   */
+  public void clearFetch() throws HiveException {
+    if (null != ftOp) {
+      ftOp.clearFetchContext();
+    }
+  }
 }