 .../org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
index f5662f0..88fe103 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java
@@ -76,7 +76,7 @@ public void load(
       MapJoinTableContainerSerDe[] mapJoinTableSerdes) throws HiveException {
     String currentInputPath = context.getCurrentInputPath().toString();
-    LOG.info("******* Load from HashTable for input file: " + currentInputPath);
+    LOG.info("Load from HashTable for input file: {}", currentInputPath);
     MapredLocalWork localWork = context.getLocalWork();
     try {
       if (localWork.getDirectFetchOp() != null) {
@@ -92,9 +92,9 @@ public void load(
           continue;
         }
         Path path = Utilities.generatePath(baseDir, desc.getDumpFilePrefix(), (byte)pos, fileName);
-        LOG.info("\tLoad back 1 hashtable file from tmp file uri:" + path);
+        LOG.info("Load back 1 hashtable file from tmp file uri: {}", path);
         ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(
-          new FileInputStream(path.toUri().getPath()), 4096));
+          new FileInputStream(path.toUri().getPath())));
         try{
           mapJoinTables[pos] = mapJoinTableSerdes[pos].load(in);
         } finally {
@@ -115,12 +115,10 @@ private Path getBaseDir(MapredLocalWork localWork) throws Exception {
       String stageID = localWork.getStageID();
       String suffix = Utilities.generateTarFileName(stageID);
       FileSystem localFs = FileSystem.getLocal(hconf);
-      for (int j = 0; j < localArchives.length; j++) {
-        Path archive = localArchives[j];
-        if (!archive.getName().endsWith(suffix)) {
-          continue;
+      for (Path archive : localArchives) {
+        if (archive.getName().endsWith(suffix)) {
+          return archive.makeQualified(localFs);
         }
-        return archive.makeQualified(localFs);
       }
     }
     return null;