Index: cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
===================================================================
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (revision 1144190)
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (working copy)
@@ -61,6 +61,7 @@
 import org.apache.hadoop.hive.service.HiveClient;
 import org.apache.hadoop.hive.service.HiveServerException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.thrift.TException;
 
 import sun.misc.Signal;
@@ -387,14 +388,18 @@
 
   public int processFile(String fileName) throws IOException {
     FileReader fileReader = null;
+    BufferedReader bufferReader = null;
+    int processReader = 0;
     try {
       fileReader = new FileReader(fileName);
-      return processReader(new BufferedReader(fileReader));
+      bufferReader = new BufferedReader(fileReader);
+      processReader = processReader(bufferReader);
+      bufferReader.close();
+      bufferReader = null;
     } finally {
-      if (fileReader != null) {
-        fileReader.close();
-      }
+      IOUtils.closeStream(bufferReader);
     }
+    return processReader;
   }
 
   public void processInitFiles(CliSessionState ss) throws IOException {
Index: contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java (revision 1144190)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesWritableInput.java (working copy)
@@ -32,6 +32,7 @@
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.MapWritable;
@@ -380,8 +381,10 @@
   }
 
   public Writable readWritable(Writable writable) throws IOException {
+    DataInputStream dis = null;
+    try {
     ByteArrayInputStream bais = new ByteArrayInputStream(in.readBytes());
-    DataInputStream dis = new DataInputStream(bais);
+    dis = new DataInputStream(bais);
     String className = WritableUtils.readString(dis);
     if (writable == null) {
       try {
@@ -395,7 +398,12 @@
       throw new IOException("wrong Writable class given");
     }
     writable.readFields(dis);
+    dis.close();
+    dis = null;
     return writable;
+    } finally {
+      IOUtils.closeStream(dis);
+    }
   }
 
   public Writable readWritable() throws IOException {
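Both hunks above apply the same close-on-success idiom: close the reader explicitly once the work has succeeded, null out the local so the cleanup step becomes a no-op, and let org.apache.hadoop.io.IOUtils.closeStream (which tolerates null and swallows IOException) cover the failure path in the finally block. The following is a minimal standalone sketch of that idiom, not part of the patch; countLines is a hypothetical helper standing in for processReader/readWritable.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

import org.apache.hadoop.io.IOUtils;

// Illustrative sketch only: the close-on-success idiom from the hunks above.
public class CloseOnSuccessSketch {
  public static int countLines(String fileName) throws IOException {
    BufferedReader reader = null;
    int count = 0;
    try {
      reader = new BufferedReader(new FileReader(fileName));
      while (reader.readLine() != null) {
        count++;
      }
      reader.close();  // success path: a failing close() still propagates
      reader = null;   // mark as closed so the finally block does nothing
    } finally {
      IOUtils.closeStream(reader);  // failure path: null-safe, ignores IOException
    }
    return count;
  }
}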
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1144190)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -38,10 +38,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
+import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -97,7 +97,6 @@
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -130,6 +129,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.serde.Constants;
@@ -142,6 +142,7 @@
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
@@ -436,10 +437,11 @@
   }
 
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
+    DataOutput outStream = null;
     try {
       Path resFile = new Path(showGrantDesc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
       PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
       PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
       String principalName = principalDesc.getName();
@@ -582,6 +584,7 @@
         }
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show table status: " + stringifyException(e));
       return 1;
@@ -591,6 +594,8 @@
     } catch (Exception e) {
       e.printStackTrace();
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -733,6 +738,7 @@
 
   private int roleDDL(RoleDDLDesc roleDDLDesc) {
     RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
+    DataOutput outStream = null;
     try {
       if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
         db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
@@ -744,12 +750,13 @@
       if (roles != null && roles.size() > 0) {
         Path resFile = new Path(roleDDLDesc.getResFile());
         FileSystem fs = resFile.getFileSystem(conf);
-        DataOutput outStream = fs.create(resFile);
+        outStream = fs.create(resFile);
         for (Role role : roles) {
           outStream.writeBytes("role name:" + role.getRoleName());
           outStream.write(terminator);
         }
         ((FSDataOutputStream) outStream).close();
+        outStream = null;
       }
     } else {
       throw new HiveException("Unkown role operation "
@@ -763,8 +770,9 @@
     } catch (IOException e) {
       LOG.info("role ddl exception: " + stringifyException(e));
       return 1;
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
-
     return 0;
   }
 
@@ -1619,11 +1627,12 @@
       parts = db.getPartitionNames(tbl.getDbName(), tbl.getTableName(), (short) -1);
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showParts.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       Iterator iterParts = parts.iterator();
       while (iterParts.hasNext()) {
@@ -1632,6 +1641,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show partitions: " + stringifyException(e));
       throw new HiveException(e.toString());
@@ -1640,6 +1650,8 @@
       throw new HiveException(e.toString());
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
@@ -1666,11 +1678,12 @@
     indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showIndexes.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       if (showIndexes.isFormatted()) {
         // column headers
@@ -1685,6 +1698,7 @@
       }
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
 
     } catch (FileNotFoundException e) {
       LOG.info("show indexes: " + stringifyException(e));
@@ -1694,6 +1708,8 @@
       throw new HiveException(e.toString());
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
@@ -1719,11 +1735,12 @@
     }
     LOG.info("results : " + databases.size());
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showDatabasesDesc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       for (String database : databases) {
         // create a row per database name
@@ -1731,6 +1748,7 @@
       outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show databases: " + stringifyException(e));
       return 1;
@@ -1739,6 +1757,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1771,11 +1791,12 @@
       tbls = db.getAllTables(dbName);
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showTbls.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       SortedSet sortedTbls = new TreeSet(tbls);
       Iterator iterTbls = sortedTbls.iterator();
@@ -1785,6 +1806,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show table: " + stringifyException(e));
       return 1;
@@ -1793,6 +1815,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1817,11 +1841,12 @@
       funcs = FunctionRegistry.getFunctionNames();
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showFuncs.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       SortedSet sortedFuncs = new TreeSet(funcs);
       Iterator iterFuncs = sortedFuncs.iterator();
@@ -1831,6 +1856,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show function: " + stringifyException(e));
       return 1;
@@ -1839,6 +1865,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -1860,11 +1888,12 @@
       throw new HiveException("show Locks LockManager not specified");
     }
 
+    DataOutput outStream = null;
    // write the results in the file
     try {
       Path resFile = new Path(showLocks.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       List locks = null;
       if (showLocks.getTableName() == null) {
@@ -1915,6 +1944,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("show function: " + stringifyException(e));
       return 1;
@@ -1923,6 +1953,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2039,11 +2071,12 @@
   private int describeFunction(DescFunctionDesc descFunc) throws HiveException {
     String funcName = descFunc.getName();
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(descFunc.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       // get the function documentation
       Description desc = null;
@@ -2079,6 +2112,7 @@
       outStream.write(terminator);
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("describe function: " + stringifyException(e));
       return 1;
@@ -2087,15 +2121,18 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
 
   private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException {
+    DataOutput outStream = null;
     try {
       Path resFile = new Path(descDatabase.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       Database database = db.getDatabase(descDatabase.getDatabaseName());
 
@@ -2123,7 +2160,7 @@
       outStream.write(terminator);
 
       ((FSDataOutputStream) outStream).close();
-
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.warn("describe database: " + stringifyException(e));
       return 1;
@@ -2132,6 +2169,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e.toString());
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2173,11 +2212,12 @@
       LOG.info("results : " + tblStr.size());
     }
 
+    DataOutput outStream = null;
     // write the results in the file
     try {
       Path resFile = new Path(showTblStatus.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       Iterator iterTables = tbls.iterator();
       while (iterTables.hasNext()) {
@@ -2256,6 +2296,7 @@
         outStream.write(terminator);
       }
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
     } catch (FileNotFoundException e) {
       LOG.info("show table status: " + stringifyException(e));
       return 1;
@@ -2264,6 +2305,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
     return 0;
   }
@@ -2287,25 +2330,28 @@
     // describe the table - populate the output stream
     Table tbl = db.getTable(tableName, false);
     Partition part = null;
+    DataOutput outStream = null;
     try {
       Path resFile = new Path(descTbl.getResFile());
       if (tbl == null) {
         FileSystem fs = resFile.getFileSystem(conf);
-        DataOutput outStream = (DataOutput) fs.open(resFile);
+        outStream = (DataOutput) fs.open(resFile);
         String errMsg = "Table " + tableName + " does not exist";
         outStream.write(errMsg.getBytes("UTF-8"));
         ((FSDataOutputStream) outStream).close();
+        outStream = null;
         return 0;
       }
       if (descTbl.getPartSpec() != null) {
         part = db.getPartition(tbl, descTbl.getPartSpec(), false);
         if (part == null) {
           FileSystem fs = resFile.getFileSystem(conf);
-          DataOutput outStream = (DataOutput) fs.open(resFile);
+          outStream = (DataOutput) fs.open(resFile);
           String errMsg = "Partition " + descTbl.getPartSpec() + " for table "
               + tableName + " does not exist";
           outStream.write(errMsg.getBytes("UTF-8"));
           ((FSDataOutputStream) outStream).close();
+          outStream = null;
           return 0;
         }
         tbl = part.getTable();
@@ -2316,6 +2362,8 @@
     } catch (IOException e) {
       LOG.info("describe table: " + stringifyException(e));
       return 1;
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     try {
@@ -2324,7 +2372,7 @@
 
       Path resFile = new Path(descTbl.getResFile());
       FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
+      outStream = fs.create(resFile);
 
       if (colPath.equals(tableName)) {
         if (!descTbl.isFormatted()) {
@@ -2380,6 +2428,7 @@
       LOG.info("DDLTask: written data for " + tbl.getTableName());
 
       ((FSDataOutputStream) outStream).close();
+      outStream = null;
 
     } catch (FileNotFoundException e) {
       LOG.info("describe table: " + stringifyException(e));
@@ -2389,6 +2438,8 @@
       return 1;
     } catch (Exception e) {
       throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
     }
 
     return 0;
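Every DDLTask hunk above repeats one shape: hoist the output-stream local out of the try block, reassign it from fs.create(resFile), null it after a successful close so the cleanup cannot close it twice, and pass the (possibly null) stream to IOUtils.closeStream in the finally block. The following condensed sketch of that shape is not part of the patch; writeResults and its arguments are hypothetical stand-ins for the various show*/describe* bodies, and it declares the local as FSDataOutputStream directly instead of DataOutput to avoid the casts used in DDLTask.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Illustrative sketch only: the result-file pattern applied throughout DDLTask above.
public class WriteResultsSketch {
  // Newline row terminator, standing in for DDLTask's terminator field.
  private static final int terminator = '\n';

  // Hypothetical stand-in for the show*/describe* bodies: write one row per entry
  // into the result file, returning 0 on success and 1 on failure.
  public static int writeResults(Configuration conf, String resFileName, List<String> rows) {
    FSDataOutputStream outStream = null;
    try {
      Path resFile = new Path(resFileName);
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);
      for (String row : rows) {
        outStream.writeBytes(row);
        outStream.write(terminator);
      }
      outStream.close();  // success path: a failing close() is still reported
      outStream = null;   // prevent a second close in the finally block
      return 0;
    } catch (IOException e) {
      return 1;
    } finally {
      IOUtils.closeStream(outStream);  // no-op once the stream was closed and nulled
    }
  }
}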
Index: ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java (revision 1144190)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/RCFileInputFormat.java (working copy)
@@ -64,12 +64,18 @@
       return false;
     }
     for (int fileId = 0; fileId < files.size(); fileId++) {
+      RCFile.Reader reader = null;
       try {
-        RCFile.Reader reader = new RCFile.Reader(fs, files.get(fileId)
+        reader = new RCFile.Reader(fs, files.get(fileId)
            .getPath(), conf);
        reader.close();
+        reader = null;
      } catch (IOException e) {
        return false;
+      } finally {
+        if (null != reader) {
+          reader.close();
+        }
      }
    }
    return true;
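The stream hunks above all rely on two properties of org.apache.hadoop.io.IOUtils.closeStream: it accepts a null argument and it swallows any IOException raised by close(). That is why each success path closes explicitly first (so a genuine close() failure still surfaces) and then nulls the reference (so the finally block becomes a no-op). The RCFileInputFormat hunk is the one exception: it null-checks and calls close() directly in its finally block, so a failing close() there is not swallowed. Roughly, closeStream behaves like the following sketch (an approximation for illustration, not the actual Hadoop source).

import java.io.Closeable;
import java.io.IOException;

// Approximate behaviour of org.apache.hadoop.io.IOUtils.closeStream (sketch only).
final class CloseStreamSketch {
  static void closeStream(Closeable stream) {
    if (stream != null) {
      try {
        stream.close();
      } catch (IOException ignored) {
        // cleanup path: the failure is deliberately ignored
      }
    }
  }
}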