Index: cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
===================================================================
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java	(revision 1124130)
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java	(working copy)
@@ -61,6 +61,7 @@
 import org.apache.hadoop.hive.service.HiveClient;
 import org.apache.hadoop.hive.service.HiveServerException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.thrift.TException;
 
 import sun.misc.Signal;
@@ -387,14 +388,18 @@
 
   public int processFile(String fileName) throws IOException {
     FileReader fileReader = null;
+    BufferedReader bufferReader = null;
+    int processReader = 0;
     try {
       fileReader = new FileReader(fileName);
-      return processReader(new BufferedReader(fileReader));
+      bufferReader = new BufferedReader(fileReader);
+      processReader = processReader(bufferReader);
+      bufferReader.close();
+      bufferReader = null;
     } finally {
-      if (fileReader != null) {
-        fileReader.close();
-      }
+      IOUtils.closeStream(bufferReader);
     }
+    return processReader;
   }
 
   public void processInitFiles(CliSessionState ss) throws IOException {
Index: contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java	(revision 1124130)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java	(working copy)
@@ -32,6 +32,7 @@
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.MapWritable;
@@ -380,8 +381,10 @@
   }
 
   public Writable readWritable(Writable writable) throws IOException {
+    DataInputStream dis = null;
+    try {
     ByteArrayInputStream bais = new ByteArrayInputStream(in.readBytes());
-    DataInputStream dis = new DataInputStream(bais);
+    dis = new DataInputStream(bais);
     String className = WritableUtils.readString(dis);
     if (writable == null) {
       try {
@@ -395,7 +398,12 @@
       throw new IOException("wrong Writable class given");
     }
     writable.readFields(dis);
+    dis.close();
+    dis = null;
     return writable;
+    } finally {
+      IOUtils.closeStream(dis);
+    }
   }
 
   public Writable readWritable() throws IOException {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 1124130)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -142,6 +142,7 @@
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
@@ -162,6 +163,7 @@
   private static String INTERMEDIATE_ORIGINAL_DIR_SUFFIX;
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
+  @Override
   public boolean requireLock() {
     return this.work != null && this.work.getNeedLock();
   }
@@ -435,10 +437,11 @@
   }
 
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
+    DataOutput outStream = null;
     try {
       Path resFile = new Path(showGrantDesc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
       PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
       String principalName = principalDesc.getName();
@@ -581,6 +584,7 @@
         }
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show table status: " + stringifyException(e));
       return 1;
@@ -590,6 +594,8 @@
     } catch (Exception e) {
       e.printStackTrace();
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -732,6 +738,7 @@
 
   private int roleDDL(RoleDDLDesc roleDDLDesc) {
     RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
+    DataOutput outStream = null;
     try {
       if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
         db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
@@ -743,12 +750,13 @@
         if (roles != null && roles.size() > 0) {
           Path resFile = new Path(roleDDLDesc.getResFile());
           FileSystem fs = resFile.getFileSystem(conf);
-          DataOutput outStream = fs.create(resFile);
+          outStream = fs.create(resFile);
           for (Role role : roles) {
             outStream.writeBytes("role name:" + role.getRoleName());
             outStream.write(terminator);
           }
           ((FSDataOutputStream) outStream).close();
+          outStream = null;
         }
       } else {
         throw new HiveException("Unkown role operation "
@@ -762,6 +770,8 @@
     } catch (IOException e) {
       LOG.info("role ddl exception: " + stringifyException(e));
       return 1;
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
@@ -1613,11 +1623,12 @@
       parts = db.getPartitionNames(tbl.getDbName(), tbl.getTableName(), (short) -1);
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showParts.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       Iterator<String> iterParts = parts.iterator();
 
       while (iterParts.hasNext()) {
@@ -1626,6 +1637,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show partitions: " + stringifyException(e));
       throw new HiveException(e.toString());
@@ -1634,6 +1646,8 @@
       throw new HiveException(e.toString());
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
    }
 
     return 0;
@@ -1660,11 +1674,12 @@
     indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showIndexes.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       if (showIndexes.isFormatted()) {
         // column headers
@@ -1679,6 +1694,7 @@
       }
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
 
     } catch (FileNotFoundException e) {
       LOG.info("show indexes: " + stringifyException(e));
@@ -1688,6 +1704,8 @@
       throw new HiveException(e.toString());
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
@@ -1713,11 +1731,12 @@
     }
     LOG.info("results : " + databases.size());
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showDatabasesDesc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       for (String database : databases) {
         // create a row per database name
@@ -1725,6 +1744,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show databases: " + stringifyException(e));
       return 1;
@@ -1733,6 +1753,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1765,11 +1787,12 @@
       tbls = db.getAllTables(dbName);
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showTbls.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       SortedSet<String> sortedTbls = new TreeSet<String>(tbls);
       Iterator<String> iterTbls = sortedTbls.iterator();
 
@@ -1779,6 +1802,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show table: " + stringifyException(e));
       return 1;
@@ -1787,6 +1811,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1811,11 +1837,12 @@
       funcs = FunctionRegistry.getFunctionNames();
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showFuncs.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       SortedSet<String> sortedFuncs = new TreeSet<String>(funcs);
       Iterator<String> iterFuncs = sortedFuncs.iterator();
 
@@ -1825,6 +1852,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show function: " + stringifyException(e));
       return 1;
@@ -1833,6 +1861,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1854,11 +1884,12 @@
       throw new HiveException("show Locks LockManager not specified");
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showLocks.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       List<HiveLock> locks = null;
 
       if (showLocks.getTableName() == null) {
@@ -1909,6 +1940,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show function: " + stringifyException(e));
       return 1;
@@ -1917,6 +1949,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2033,11 +2067,12 @@
   private int describeFunction(DescFunctionDesc descFunc) throws HiveException {
     String funcName = descFunc.getName();
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(descFunc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       // get the function documentation
       Description desc = null;
@@ -2073,6 +2108,7 @@
       outStream.write(terminator);
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("describe function: " + stringifyException(e));
       return 1;
@@ -2081,15 +2117,18 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
 
   private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException {
+    DataOutput outStream = null;
     try {
       Path resFile = new Path(descDatabase.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       Database database = db.getDatabase(descDatabase.getDatabaseName());
 
@@ -2117,6 +2156,7 @@
       outStream.write(terminator);
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
 
     } catch (FileNotFoundException e) {
       LOG.warn("describe database: " + stringifyException(e));
@@ -2126,6 +2166,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2167,11 +2209,12 @@
       LOG.info("results : " + tblStr.size());
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showTblStatus.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       Iterator<Table> iterTables = tbls.iterator();
       while (iterTables.hasNext()) {
@@ -2250,6 +2293,8 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
+
     } catch (FileNotFoundException e) {
       LOG.info("show table status: " + stringifyException(e));
       return 1;
@@ -2258,6 +2303,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2284,23 +2331,35 @@
     try {
       Path resFile = new Path(descTbl.getResFile());
       if (tbl == null) {
+        DataOutput outStream = null;
+        try {
         FileSystem fs = resFile.getFileSystem(conf);
-        DataOutput outStream = (DataOutput) fs.open(resFile);
+        outStream = (DataOutput) fs.open(resFile);
         String errMsg = "Table " + tableName + " does not exist";
         outStream.write(errMsg.getBytes("UTF-8"));
         ((FSDataOutputStream) outStream).close();
+        outStream = null;
         return 0;
+        } finally {
+          IOUtils.closeStream((FSDataOutputStream) outStream);
+        }
       }
 
       if (descTbl.getPartSpec() != null) {
         part = db.getPartition(tbl, descTbl.getPartSpec(), false);
         if (part == null) {
+          DataOutput outStream = null;
+          try {
           FileSystem fs = resFile.getFileSystem(conf);
-          DataOutput outStream = (DataOutput) fs.open(resFile);
+          outStream = (DataOutput) fs.open(resFile);
           String errMsg = "Partition " + descTbl.getPartSpec() + " for table "
               + tableName + " does not exist";
           outStream.write(errMsg.getBytes("UTF-8"));
           ((FSDataOutputStream) outStream).close();
+          outStream = null;
           return 0;
+          } finally {
+            IOUtils.closeStream((FSDataOutputStream) outStream);
+          }
         }
         tbl = part.getTable();
       }
@@ -2312,13 +2371,14 @@
       return 1;
     }
 
+    DataOutput outStream = null;
     try {
       LOG.info("DDLTask: got data for " + tbl.getTableName());
 
       Path resFile = new Path(descTbl.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       if (colPath.equals(tableName)) {
         if (!descTbl.isFormatted()) {
@@ -2374,7 +2434,7 @@
       LOG.info("DDLTask: written data for " + tbl.getTableName());
 
       ((FSDataOutputStream) outStream).close();
-
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("describe table: " + stringifyException(e));
       return 1;
@@ -2383,6 +2443,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
Index: ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java	(revision 1124130)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java	(working copy)
@@ -64,12 +64,18 @@
       return false;
     }
     for (int fileId = 0; fileId < files.size(); fileId++) {
+      RCFile.Reader reader = null;
       try {
-        RCFile.Reader reader = new RCFile.Reader(fs, files.get(fileId)
+        reader = new RCFile.Reader(fs, files.get(fileId)
             .getPath(), conf);
         reader.close();
+        reader = null;
       } catch (IOException e) {
         return false;
+      } finally {
+        if (null != reader) {
+          reader.close();
+        }
       }
     }
     return true;
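
Every hunk above applies the same close-and-null idiom: declare the stream outside the try block, close() it and null the reference on the success path, and let org.apache.hadoop.io.IOUtils.closeStream() in the finally block release whatever is still open when an exception unwinds the method. A minimal self-contained sketch of the idiom follows; the class name, method name, parameters, and file contents are illustrative only and do not appear in the patch:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    public class CloseStreamSketch {

      // Hypothetical helper: writes one row to a result file without leaking
      // the stream on any path, mirroring the DDLTask methods in the patch.
      public static int writeResult(Configuration conf, String resFileName)
          throws IOException {
        FSDataOutputStream outStream = null;
        try {
          Path resFile = new Path(resFileName);
          FileSystem fs = resFile.getFileSystem(conf);
          outStream = fs.create(resFile);
          outStream.writeBytes("sample row");
          outStream.write('\n');
          // Close explicitly on the success path so a failed flush/close is
          // reported to the caller rather than swallowed...
          outStream.close();
          // ...then null the reference so the finally block becomes a no-op.
          outStream = null;
          return 0;
        } finally {
          // Null-safe: does nothing when the stream was already closed above,
          // and does not mask an in-flight exception with a close() failure.
          IOUtils.closeStream(outStream);
        }
      }
    }

The close-then-null on the success path is what distinguishes this from a plain close-in-finally: IOUtils.closeStream() deliberately discards close-time IOExceptions, which is acceptable while an exception is already propagating but would silently lose a failed flush if it were the only close.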