diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index d6e33f8..6fcc510 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -258,7 +258,7 @@ public void initConf() throws Exception {
mr.setupConfiguration(conf);
// set fs.default.name to the uri of mini-dfs
- String dfsUriString = getHdfsUriString(dfs.getFileSystem().getUri().toString());
+ String dfsUriString = WindowsPathUtil.getHdfsUriString(dfs.getFileSystem().getUri().toString());
conf.setVar(HiveConf.ConfVars.HADOOPFS, dfsUriString);
// hive.metastore.warehouse.dir needs to be set relative to the mini-dfs
conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
@@ -269,46 +269,10 @@ public void initConf() throws Exception {
// Windows paths should be converted after MiniMrShim.setupConfiguration()
// since setupConfiguration may overwrite configuration values.
if (Shell.WINDOWS) {
- convertPathsFromWindowsToHdfs();
+ WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
}
}
- private void convertPathsFromWindowsToHdfs() {
- // Following local paths are used as HDFS paths in unit tests.
- // It works well in Unix as the path notation in Unix and HDFS is more or less same.
- // But when it comes to Windows, drive letter separator ':' & backslash '\" are invalid
- // characters in HDFS so we need to converts these local paths to HDFS paths before using them
- // in unit tests.
-
- // hive.exec.scratchdir needs to be set relative to the mini-dfs
- String orgWarehouseDir = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
- conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, getHdfsUriString(orgWarehouseDir));
-
- String orgTestTempDir = System.getProperty("test.tmp.dir");
- System.setProperty("test.tmp.dir", getHdfsUriString(orgTestTempDir));
-
- String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
- conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getHdfsUriString(orgScratchDir));
-
- if (miniMr) {
- String orgAuxJarFolder = conf.getAuxJars();
- conf.setAuxJars(getHdfsUriString("file://" + orgAuxJarFolder));
- }
- }
-
- private String getHdfsUriString(String uriStr) {
- assert uriStr != null;
- if(Shell.WINDOWS) {
- // If the URI conversion is from Windows to HDFS then replace the '\' with '/'
- // and remove the windows single drive letter & colon from absolute path.
- return uriStr.replace('\\', '/')
- .replaceFirst("/[c-zC-Z]:", "/")
- .replaceFirst("^[c-zC-Z]:", "");
- }
-
- return uriStr;
- }
-
public enum MiniClusterType {
mr,
tez,
@@ -361,7 +325,7 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
if (clusterType != MiniClusterType.none) {
dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
FileSystem fs = dfs.getFileSystem();
- String uriString = getHdfsUriString(fs.getUri().toString());
+ String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());
if (clusterType == MiniClusterType.tez) {
mr = shims.getMiniTezCluster(conf, 4, uriString, 1);
} else {
diff --git pom.xml pom.xml
index 426dca8..b40f2c3 100644
--- pom.xml
+++ pom.xml
@@ -62,6 +62,7 @@
${maven.test.classpath}
    <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
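+    <!-- URI form of test.tmp.dir; the .q tests read it as ${system:test.tmp.dir.uri} -->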
+    <test.tmp.dir.uri>file://${test.tmp.dir}</test.tmp.dir.uri>
    <test.warehouse.dir>${project.build.directory}/warehouse</test.warehouse.dir>
    <test.warehouse.scheme>pfile://</test.warehouse.scheme>
@@ -754,6 +755,7 @@
${basedir}/${hive.path.to.root}/data/files
${basedir}/${hive.path.to.root}/data/files
    <test.tmp.dir>${test.tmp.dir}</test.tmp.dir>
+   <test.tmp.dir.uri>${test.tmp.dir.uri}</test.tmp.dir.uri>
    <test.dfs.mkdir>${test.dfs.mkdir}</test.dfs.mkdir>
    <test.output.overwrite>${test.output.overwrite}</test.output.overwrite>
    <test.warehouse.dir>${test.warehouse.scheme}${test.warehouse.dir}</test.warehouse.dir>
@@ -1043,6 +1045,7 @@
${basedir}/${hive.path.to.root}/testutils/hadoop.cmd
${project.build.directory}/deplibs/*
+    <test.tmp.dir.uri>file:///${test.tmp.dir}</test.tmp.dir.uri>
diff --git ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java
index 131260b..294a3dd 100644
--- ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java
+++ ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java
@@ -24,17 +24,26 @@
public class WindowsPathUtil {
public static void convertPathsFromWindowsToHdfs(HiveConf conf){
+    // The following local paths are used as HDFS paths in unit tests.
+    // That works on Unix, where the path notation is more or less the same as in HDFS.
+    // But on Windows the drive letter separator ':' and the backslash '\' are invalid
+    // characters in HDFS, so we need to convert these local paths to HDFS paths before using them
+ // in unit tests.
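+    // For example, a local path like C:\tmp\hive comes back from getHdfsUriString()
+    // as /tmp/hive: backslashes become '/' and the drive letter prefix is stripped.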
+
String orgWarehouseDir = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, getHdfsUriString(orgWarehouseDir));
String orgTestTempDir = System.getProperty("test.tmp.dir");
System.setProperty("test.tmp.dir", getHdfsUriString(orgTestTempDir));
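+    // test.warehouse.dir is also handed to the .q tests as an HDFS-style path, so convert it as well.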
+ String orgTestWarehouseDir = System.getProperty("test.warehouse.dir");
+ System.setProperty("test.warehouse.dir", getHdfsUriString(orgTestWarehouseDir));
+
String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getHdfsUriString(orgScratchDir));
}
- private static String getHdfsUriString(String uriStr) {
+ public static String getHdfsUriString(String uriStr) {
assert uriStr != null;
if(Shell.WINDOWS) {
// If the URI conversion is from Windows to HDFS then replace the '\' with '/'
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index b548672..4803f41 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -71,9 +71,9 @@
static HiveConf conf;
- private static final String tmpdir = System.getProperty("test.tmp.dir");
+ private static final String tmpdir;
private static final Log LOG = LogFactory.getLog(TestExecDriver.class);
- private static final Path tmppath = new Path(tmpdir);
+ private static final Path tmppath;
private static Hive db;
private static FileSystem fs;
@@ -86,6 +86,8 @@
if (Shell.WINDOWS) {
WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
}
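+    // Resolve test.tmp.dir only after the Windows-to-HDFS conversion above may have rewritten it.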
+ tmpdir = System.getProperty("test.tmp.dir");
+ tmppath = new Path(tmpdir);
fs = FileSystem.get(conf);
if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
diff --git ql/src/test/queries/clientpositive/scriptfile1_win.q ql/src/test/queries/clientpositive/scriptfile1_win.q
index 0008ae5..fd19d84 100644
--- ql/src/test/queries/clientpositive/scriptfile1_win.q
+++ ql/src/test/queries/clientpositive/scriptfile1_win.q
@@ -3,7 +3,7 @@ set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
CREATE TABLE dest1(key INT, value STRING);
-ADD FILE src/test/scripts/testgrep_win.bat;
+ADD FILE ../../ql/src/test/scripts/testgrep_win.bat;
FROM (
FROM src
diff --git ql/src/test/queries/clientpositive/tez_insert_overwrite_local_directory_1.q ql/src/test/queries/clientpositive/tez_insert_overwrite_local_directory_1.q
index d7a652f..e06403a 100644
--- ql/src/test/queries/clientpositive/tez_insert_overwrite_local_directory_1.q
+++ ql/src/test/queries/clientpositive/tez_insert_overwrite_local_directory_1.q
@@ -1,5 +1,5 @@
insert overwrite local directory '${system:test.tmp.dir}/tez_local_src_table_1'
select * from src order by key limit 10 ;
-dfs -cat file:${system:test.tmp.dir}/tez_local_src_table_1/000000_0 ;
+dfs -cat ${system:test.tmp.dir.uri}/tez_local_src_table_1/* ;
-dfs -rmr file:${system:test.tmp.dir}/tez_local_src_table_1/ ;
+dfs -rmr ${system:test.tmp.dir.uri}/tez_local_src_table_1/ ;
diff --git ql/src/test/results/clientpositive/scriptfile1_win.q.out ql/src/test/results/clientpositive/scriptfile1_win.q.out
index dfaa057..1f84bd3 100644
--- ql/src/test/results/clientpositive/scriptfile1_win.q.out
+++ ql/src/test/results/clientpositive/scriptfile1_win.q.out
@@ -2,10 +2,12 @@ PREHOOK: query: -- INCLUDE_OS_WINDOWS
CREATE TABLE dest1(key INT, value STRING)
PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
POSTHOOK: query: -- INCLUDE_OS_WINDOWS
CREATE TABLE dest1(key INT, value STRING)
POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
POSTHOOK: Output: default@dest1
PREHOOK: query: FROM (
FROM src