diff --git ql/src/java/org/apache/hadoop/hive/ql/Context.java ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 7b861ed..9493ffe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -347,9 +347,9 @@ public String localizeMRTmpFileURI(String originalURI) {
*
* @return next available tmp path on local fs
*/
- public String getLocalTmpFileURI() {
- return getLocalScratchDir(true) + Path.SEPARATOR + LOCAL_PREFIX +
- nextPathId();
+ public Path getLocalTmpFileURI() {
+ return new Path(getLocalScratchDir(true), Path.SEPARATOR + LOCAL_PREFIX +
+ nextPathId());
}
/**
@@ -489,13 +489,6 @@ public void resetStream() {
}
/**
- * Little abbreviation for StringUtils.
- */
- private static boolean strEquals(String str1, String str2) {
- return org.apache.commons.lang.StringUtils.equals(str1, str2);
- }
-
- /**
* Set the token rewrite stream being used to parse the current top-level SQL
* statement. Note that this should not be used for other parsing
* activities; for example, when we encounter a reference to a view, we switch
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index daf4e4a..8a5c2b0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -2383,7 +2383,7 @@ public static String generateFileName(Byte tag, String bigBucketFileName) {
return fileName;
}
- public static String generateTmpURI(String baseURI, String id) {
+ public static String generateTmpURI(Path baseURI, String id) {
String tmpFileURI = new String(baseURI + Path.SEPARATOR + "HashTable-" + id);
return tmpFileURI;
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index a7e2253..0311596 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -188,7 +188,7 @@ public int execute(DriverContext driverContext) {
if (!files.isEmpty()) {
cmdLine = cmdLine + " -files " + files;
- workDir = (new Path(ctx.getLocalTmpFileURI())).toUri().getPath();
+ workDir = ctx.getLocalTmpFileURI().toUri().getPath();
if (! (new File(workDir)).mkdir()) {
throw new IOException ("Cannot create tmp working dir: " + workDir);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index 0cc90d0..e0904dc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -157,7 +157,7 @@ public int execute(DriverContext driverContext) {
if (!files.isEmpty()) {
cmdLine = cmdLine + " -files " + files;
- workDir = (new Path(ctx.getLocalTmpFileURI())).toUri().getPath();
+ workDir = ctx.getLocalTmpFileURI().toUri().getPath();
if (!(new File(workDir)).mkdir()) {
throw new IOException("Cannot create tmp working dir: " + workDir);
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
index 010ac54..3482e57 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
@@ -26,6 +26,7 @@
import java.util.Map;
import java.util.Stack;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.ConditionalTask;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -103,7 +104,7 @@ private void processCurrentTask(Task<? extends Serializable> currTask,
Context ctx = physicalContext.getContext();
String tmpFileURI = Utilities.generateTmpURI(ctx.getLocalTmpFileURI(), currTask.getId());
localwork.setTmpFileURI(tmpFileURI);
- String hdfsTmpURI = Utilities.generateTmpURI(ctx.getMRTmpFileURI(), currTask.getId());
+ String hdfsTmpURI = Utilities.generateTmpURI(new Path(ctx.getMRTmpFileURI()), currTask.getId());
mapredWork.getMapWork().setTmpHDFSFileURI(hdfsTmpURI);
// create a task for this local work; right now, this local work is shared
// by the original MapredTask and this new generated MapredLocalTask.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0f60fcb..5fde65c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -270,51 +270,51 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
analyzeDropIndex(ast);
break;
case HiveParser.TOK_DESCTABLE:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeDescribeTable(ast);
break;
case HiveParser.TOK_SHOWDATABASES:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowDatabases(ast);
break;
case HiveParser.TOK_SHOWTABLES:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowTables(ast);
break;
case HiveParser.TOK_SHOWCOLUMNS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowColumns(ast);
break;
case HiveParser.TOK_SHOW_TABLESTATUS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowTableStatus(ast);
break;
case HiveParser.TOK_SHOW_TBLPROPERTIES:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowTableProperties(ast);
break;
case HiveParser.TOK_SHOWFUNCTIONS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowFunctions(ast);
break;
case HiveParser.TOK_SHOWLOCKS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowLocks(ast);
break;
case HiveParser.TOK_SHOWDBLOCKS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowDbLocks(ast);
break;
case HiveParser.TOK_DESCFUNCTION:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeDescFunction(ast);
break;
case HiveParser.TOK_DESCDATABASE:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeDescDatabase(ast);
break;
case HiveParser.TOK_MSCK:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeMetastoreCheck(ast);
break;
case HiveParser.TOK_DROPVIEW:
@@ -384,15 +384,15 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
analyzeAlterIndexProps(ast);
break;
case HiveParser.TOK_SHOWPARTITIONS:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowPartitions(ast);
break;
case HiveParser.TOK_SHOW_CREATETABLE:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowCreateTable(ast);
break;
case HiveParser.TOK_SHOWINDEXES:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowIndexes(ast);
break;
case HiveParser.TOK_LOCKTABLE:
@@ -426,7 +426,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
analyzeDropRole(ast);
break;
case HiveParser.TOK_SHOW_ROLE_GRANT:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowRoleGrant(ast);
break;
case HiveParser.TOK_GRANT_ROLE:
@@ -439,7 +439,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
analyzeGrant(ast);
break;
case HiveParser.TOK_SHOW_GRANT:
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
analyzeShowGrant(ast);
break;
case HiveParser.TOK_REVOKE:
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 9b1c36e..c52647a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -65,7 +65,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
sem.analyze(input, ctx);
sem.validate();
- ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ ctx.setResFile(ctx.getLocalTmpFileURI());
List<Task<? extends Serializable>> tasks = sem.getRootTasks();
Task<? extends Serializable> fetchTask = sem.getFetchTask();
if (tasks == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index be0ad62..359f63f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -87,8 +87,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
if (ts.tableHandle.isPartitioned()) {
partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
}
- String tmpfile = ctx.getLocalTmpFileURI();
- Path path = new Path(tmpfile, "_metadata");
+ Path path = new Path(ctx.getLocalTmpFileURI(), "_metadata");
EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
path, new Path(toURI), false), conf);