Index: ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
===================================================================
--- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java (revision 1353213)
+++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java (working copy)
@@ -353,6 +353,13 @@
ctx.put("clusterMode", clusterMode);
ctx.put("hadoopVersion", hadoopVersion);
+ // Pass the windows flag to the template so it can perform the
+ // Windows-specific path conversions, e.g. escaping backslashes.
+ // Borrowed from Shell.WINDOWS; we can't reference the Shell class
+ // here because of build dependencies.
+ boolean windows = System.getProperty("os.name").toLowerCase().startsWith("win");
+ ctx.put("windows", windows);
+
File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
t.merge(ctx, writer);
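For reference, the detection above in isolation — a minimal standalone sketch (class name hypothetical) of the same check Shell.WINDOWS performs:

    // Mirrors the Shell.WINDOWS check without depending on the Hadoop
    // Shell class, which this ant task cannot reference.
    public class OsCheck {
        public static final boolean WINDOWS =
            System.getProperty("os.name").toLowerCase().startsWith("win");

        public static void main(String[] args) {
            System.out.println("windows = " + WINDOWS);  // true on any Windows JVM
        }
    }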
Index: build-common.xml
===================================================================
--- build-common.xml (revision 1353213)
+++ build-common.xml (working copy)
@@ -356,13 +356,18 @@
+
+
+
+
+
+ depends="test-conditions,gen-test,compile-test,test-jar,test-init">
@@ -374,7 +379,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -386,13 +405,13 @@
-
+
-
+
-
+
@@ -409,7 +428,7 @@
-
+
Index: build.xml
===================================================================
--- build.xml (revision 1353213)
+++ build.xml (working copy)
@@ -64,6 +64,11 @@
+
+
+
+
+
@@ -105,10 +110,10 @@
-
+
-
+
@@ -117,9 +122,9 @@
-
+
-
+
@@ -128,7 +133,7 @@
-
+
@@ -240,12 +245,26 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -280,6 +299,7 @@
@@ -319,7 +339,7 @@
-
+
@@ -361,6 +381,7 @@
@@ -533,8 +554,6 @@
-
-
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1353213)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -38,6 +38,7 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Shell;
/**
* Hive Configuration.
@@ -661,8 +662,10 @@
val = System.getenv("HADOOP_PREFIX");
}
// and if all else fails we can at least try /usr/bin/hadoop
- return (val == null ? File.separator + "usr" : val)
+ val = (val == null ? File.separator + "usr" : val)
+ File.separator + "bin" + File.separator + "hadoop";
+ // On Windows, launch the hadoop command file (hadoop.cmd).
+ return val + (Shell.WINDOWS ? ".cmd" : "");
}
}
@@ -885,6 +888,14 @@
hiveJar = this.get(ConfVars.HIVEJAR.varname);
}
+ if (Shell.WINDOWS && hiveJar != null) {
+   if (hiveJar.startsWith("/")) {
+     hiveJar = hiveJar.substring(1);
+   }
+ }
+
if (auxJars == null) {
auxJars = this.get(ConfVars.HIVEAUXJARS.varname);
}
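A standalone sketch of the lookup the HiveConf hunk above implements. The HADOOP_HOME fallback, class, and method names are assumptions for illustration; only the HADOOP_PREFIX fallback, the /usr/bin/hadoop default, and the .cmd suffix appear in the hunk:

    import java.io.File;

    class HadoopBinSketch {
        // Resolve the hadoop launcher: env vars first, /usr/bin/hadoop as a
        // last resort, and the .cmd command file on Windows.
        static String getHadoopBin(boolean windows) {
            String val = System.getenv("HADOOP_HOME");   // assumed from surrounding context
            if (val == null) {
                val = System.getenv("HADOOP_PREFIX");
            }
            val = (val == null ? File.separator + "usr" : val)
                + File.separator + "bin" + File.separator + "hadoop";
            return val + (windows ? ".cmd" : "");
        }
    }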
Index: contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java
===================================================================
--- contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java (revision 1353213)
+++ contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java (working copy)
@@ -24,6 +24,8 @@
import junit.framework.TestCase;
+import org.apache.hadoop.util.Shell;
+
/**
* TestGenericMR.
*
@@ -61,8 +63,7 @@
final StringWriter out = new StringWriter();
new GenericMR().map(new StringReader(in), out, identityMapper());
-
- assertEquals(in + "\n", out.toString());
+ assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}
public void testKVSplitMap() throws Exception {
@@ -79,7 +80,7 @@
}
});
- assertEquals(expected, out.toString());
+ assertEquals(expected, getOsSpecificOutput(out.toString()));
}
public void testIdentityReduce() throws Exception {
@@ -88,7 +89,7 @@
new GenericMR().reduce(new StringReader(in), out, identityReducer());
- assertEquals(in + "\n", out.toString());
+ assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}
public void testWordCountReduce() throws Exception {
@@ -111,7 +112,7 @@
final String expected = "hello\t3\nokay\t12\n";
- assertEquals(expected, out.toString());
+ assertEquals(expected, getOsSpecificOutput(out.toString()));
}
private Mapper identityMapper() {
@@ -134,4 +135,9 @@
}
};
}
+
+ private static String getOsSpecificOutput(String outStr){
+ assert outStr != null;
+ return Shell.WINDOWS ? outStr.replaceAll("\\r", "") : outStr;
+ }
}
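A minimal sketch of what getOsSpecificOutput buys these tests: stripping carriage returns so CRLF output produced on Windows compares equal to the LF-based expected strings (class name hypothetical):

    class LineEndingSketch {
        static String normalize(String outStr) {
            // Same regex as the patch: remove every carriage return.
            return outStr.replaceAll("\\r", "");
        }

        public static void main(String[] args) {
            System.out.println(normalize("hello\t3\r\n").equals("hello\t3\n"));  // true
        }
    }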
Index: hbase-handler/src/test/templates/TestHBaseCliDriver.vm
===================================================================
--- hbase-handler/src/test/templates/TestHBaseCliDriver.vm (revision 1353213)
+++ hbase-handler/src/test/templates/TestHBaseCliDriver.vm (working copy)
@@ -56,7 +56,23 @@
if ("$clusterMode".equals("miniMR")) {
miniMR = true;
}
+ // Escape the backslashes in the canonical path if the unit test runs
+ // on Windows. e.g. resultsDir.getCanonicalPath() returns the absolute
+ // path of a local directory; embedding it directly in the generated
+ // Java class causes a compiler error on Windows because the canonical
+ // path contains backslashes ("C:\temp\etc\"), which do not form a
+ // valid Java string literal unless the backslashes are escaped.
+#if ($windows)
+ qt = new HBaseQTestUtil(
+ "$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ setup);
+#foreach ($qf in $qfiles)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#end
+#else
qt = new HBaseQTestUtil(
"$resultsDir.getCanonicalPath()",
"$logDir.getCanonicalPath()", miniMR, setup);
@@ -64,6 +80,7 @@
#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
#end
+#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
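The escaping matters because the template splices the path into a Java string literal in the generated source; a sketch with a hypothetical path:

    class PathLiteralSketch {
        public static void main(String[] args) {
            // "C:\temp\etc" pasted verbatim would not compile as intended:
            // \t is a tab escape and \e is not a valid escape at all.
            String dir = "C:\\temp\\etc";     // doubled backslashes: valid literal
            System.out.println(dir);          // prints C:\temp\etc
        }
    }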
Index: hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
===================================================================
--- hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm (revision 1353213)
+++ hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm (working copy)
@@ -40,13 +40,25 @@
miniMR = true;
}
+#if ($windows)
qt = new HBaseQTestUtil(
+ "$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ setup);
+
+#foreach ($qf in $qfiles)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#end
+#else
+ qt = new HBaseQTestUtil(
"$resultsDir.getCanonicalPath()",
"$logDir.getCanonicalPath()", miniMR, setup);
#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
#end
+#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
Index: ql/src/java/org/apache/hadoop/hive/ql/Context.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java (revision 1353213)
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java (working copy)
@@ -44,6 +44,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
/**
@@ -105,7 +106,11 @@
executionId);
// local tmp location is not configurable for now
- localScratchDir = System.getProperty("java.io.tmpdir")
+ localScratchDir = (Shell.WINDOWS)?
+ Path.SEPARATOR + "tmp" + Path.SEPARATOR
+ + System.getProperty("user.name") + Path.SEPARATOR
+ + executionId:
+ System.getProperty("java.io.tmpdir")
+ Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
+ executionId;
}
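A sketch of the scratch-dir selection above. Path.SEPARATOR is "/" in Hadoop; the standalone method is an assumption for illustration:

    class ScratchDirSketch {
        static String localScratchDir(boolean windows, String executionId) {
            String sep = "/";  // stands in for Path.SEPARATOR
            // On Windows, java.io.tmpdir resolves to something like
            // C:\Users\x\AppData\Local\Temp, which is not usable as a Hadoop
            // path, so a fixed /tmp/<user>/<executionId> is used instead.
            return windows
                ? sep + "tmp" + sep + System.getProperty("user.name") + sep + executionId
                : System.getProperty("java.io.tmpdir")
                    + sep + System.getProperty("user.name") + sep + executionId;
        }
    }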
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (revision 1353213)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (working copy)
@@ -44,6 +44,7 @@
import org.apache.hadoop.hive.ql.plan.CopyWork;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.util.Shell;
/**
* LoadSemanticAnalyzer.
@@ -81,7 +82,9 @@
// directory
if (!path.startsWith("/")) {
if (isLocal) {
- path = new Path(System.getProperty("user.dir"), path).toString();
+ String userdir = (Shell.WINDOWS) ?
+ System.getProperty("user.dir").substring(2) : System.getProperty("user.dir");
+ path = new Path(userdir, path).toString();
} else {
path = new Path(new Path("/user/" + System.getProperty("user.name")),
path).toString();
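A sketch of the user.dir adjustment above, with hypothetical values — substring(2) drops the "C:" drive prefix so the result can serve as a Hadoop path:

    class UserDirSketch {
        public static void main(String[] args) {
            String userdir = "C:\\work\\hive";   // hypothetical user.dir on Windows
            boolean windows = true;
            String adjusted = windows ? userdir.substring(2) : userdir;
            System.out.println(adjusted);        // prints \work\hive
        }
    }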
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (revision 1353213)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (working copy)
@@ -33,7 +33,6 @@
import java.io.PrintStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
-import java.lang.UnsupportedOperationException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
@@ -46,7 +45,6 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
@@ -82,6 +80,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.util.Shell;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.zookeeper.ZooKeeper;
@@ -94,6 +93,9 @@
private static final Log LOG = LogFactory.getLog("QTestUtil");
private String testWarehouse;
+ private final String tmpdir =
+ getOsSpecificUriString(System.getProperty("test.tmp.dir"), true);
+ private final Path tmppath = new Path(tmpdir);
private final String testFiles;
protected final String outDir;
protected final String logDir;
@@ -209,15 +211,48 @@
assert dfs != null;
assert mr != null;
// set fs.default.name to the uri of mini-dfs
- conf.setVar(HiveConf.ConfVars.HADOOPFS, dfs.getFileSystem().getUri().toString());
+ String dfsUriString = getOsSpecificUriString(
+ dfs.getFileSystem().getUri().toString(), true);
+ conf.setVar(HiveConf.ConfVars.HADOOPFS, dfsUriString);
// hive.metastore.warehouse.dir needs to be set relative to the mini-dfs
conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
- (new Path(dfs.getFileSystem().getUri().toString(),
+ (new Path(dfsUriString,
"/build/ql/test/data/warehouse/")).toString());
+ if(Shell.WINDOWS) {
+ // hive.exec.scratchdir needs to be set relative to the mini-dfs
+ String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
+ conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getOsSpecificUriString(orgScratchDir, true));
+
+ String orgAuxJarFolder = conf.getVar(HiveConf.ConfVars.HIVEAUXJARS);
+ conf.setVar(HiveConf.ConfVars.HIVEAUXJARS,
+ getOsSpecificUriString("file://" + orgAuxJarFolder, true));
+ }
conf.setVar(HiveConf.ConfVars.HADOOPJT, "localhost:" + mr.getJobTrackerPort());
}
}
+ private String getOsSpecificUriString(String uriStr, boolean toHdfs) {
+ assert uriStr != null;
+ if(Shell.WINDOWS) {
+ // If the URI conversion is from Windows to HDFS then replace the '\' with '/'
+ return getOsSpecificUriString(
+ toHdfs ? uriStr.replace('\\', '/') : uriStr);
+ }
+
+ return uriStr;
+ }
+
+ private String getOsSpecificUriString(String uriStr) {
+ assert uriStr != null;
+ if(Shell.WINDOWS) {
+ // Remove the windows single drive letter & colon from absolute path.
+ return uriStr.replaceFirst("/[c-zC-Z]:", "/")
+ .replaceFirst("^[c-zC-Z]:", "");
+ }
+
+ return uriStr;
+ }
+
public QTestUtil(String outDir, String logDir, boolean miniMr, String hadoopVer)
throws Exception {
this.outDir = outDir;
@@ -231,7 +266,9 @@
if (miniMr) {
dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
FileSystem fs = dfs.getFileSystem();
- mr = new MiniMRCluster(4, fs.getUri().toString(), 1);
+ mr = new MiniMRCluster(4,
+ getOsSpecificUriString(fs.getUri().toString(), true),
+ 1);
}
initConf();
@@ -242,8 +279,7 @@
dataDir = new File(".").getAbsolutePath() + "/data/files";
}
- testFiles = dataDir.replace('\\', '/')
- .replace("c:", "");
+ testFiles = getOsSpecificUriString(conf.get("test.data.files"), true);
String ow = System.getProperty("test.output.overwrite");
if ((ow != null) && ow.equalsIgnoreCase("true")) {
@@ -285,7 +321,7 @@
// Look for a hint to not run a test on some Hadoop versions
Pattern pattern = Pattern.compile("-- (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS\\((.*)\\)");
-
+
boolean excludeQuery = false;
boolean includeQuery = false;
Set<String> versionSet = new HashSet<String>();
@@ -313,7 +349,7 @@
+ " contains more than one reference to (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS";
throw new UnsupportedOperationException(message);
}
-
+
String prefix = matcher.group(1);
if ("EX".equals(prefix)) {
excludeQuery = true;
@@ -330,7 +366,7 @@
qsb.append(line + "\n");
}
qMap.put(qf.getName(), qsb.toString());
-
+
if (excludeQuery && versionSet.contains(hadoopVer)) {
System.out.println("QTestUtil: " + qf.getName()
+ " EXCLUDE list contains Hadoop Version " + hadoopVer + ". Skipping...");
@@ -459,6 +495,7 @@
IgnoreKeyTextOutputFormat.class);
Path fpath;
+ String fpathString;
HashMap<String, String> part_spec = new HashMap<String, String>();
for (String ds : new String[] {"2008-04-08", "2008-04-09"}) {
for (String hr : new String[] {"11", "12"}) {
@@ -469,7 +506,8 @@
// db.createPartition(srcpart, part_spec);
fpath = new Path(testFiles, "kv1.txt");
// db.loadPartition(fpath, srcpart.getName(), part_spec, true);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString
+ "' OVERWRITE INTO TABLE srcpart PARTITION (ds='" + ds + "',hr='"
+ hr + "')");
}
@@ -481,7 +519,8 @@
// IgnoreKeyTextOutputFormat.class, 2, bucketCols);
for (String fname : new String[] {"srcbucket0.txt", "srcbucket1.txt"}) {
fpath = new Path(testFiles, fname);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString
+ "' INTO TABLE srcbucket");
}
@@ -492,7 +531,7 @@
for (String fname : new String[] {"srcbucket20.txt", "srcbucket21.txt",
"srcbucket22.txt", "srcbucket23.txt"}) {
fpath = new Path(testFiles, fname);
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + getOsSpecificUriString(fpath.toString())
+ "' INTO TABLE srcbucket2");
}
@@ -520,25 +559,30 @@
// load the input data into the src table
fpath = new Path(testFiles, "kv1.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src");
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString + "' INTO TABLE src");
// load the input data into the src table
fpath = new Path(testFiles, "kv3.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src1");
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString + "' INTO TABLE src1");
// load the input data into the src_sequencefile table
fpath = new Path(testFiles, "kv1.seq");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString
+ "' INTO TABLE src_sequencefile");
// load the input data into the src_thrift table
fpath = new Path(testFiles, "complex.seq");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString
+ "' INTO TABLE src_thrift");
// load the json data into the src_json table
fpath = new Path(testFiles, "json.txt");
- runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
+ fpathString = getOsSpecificUriString(fpath.toString());
+ runLoadCmd("LOAD DATA LOCAL INPATH '" + fpathString
+ "' INTO TABLE src_json");
conf.setBoolean("hive.test.init.phase", false);
}
@@ -712,26 +756,9 @@
outfd.write(e.getMessage());
outfd.close();
- String cmdLine = "diff " + outf.getPath() + " " + expf;
- System.out.println(cmdLine);
-
- Process executor = Runtime.getRuntime().exec(cmdLine);
-
- StreamPrinter outPrinter = new StreamPrinter(
- executor.getInputStream(), null, SessionState.getConsole().getChildOutStream());
- StreamPrinter errPrinter = new StreamPrinter(
- executor.getErrorStream(), null, SessionState.getConsole().getChildErrStream());
-
- outPrinter.start();
- errPrinter.start();
-
- int exitVal = executor.waitFor();
-
+ int exitVal = executeDiffCommand(outf.getPath(), expf, false);
if (exitVal != 0 && overWrite) {
- System.out.println("Overwriting results");
- cmdLine = "cp " + outf.getPath() + " " + expf;
- executor = Runtime.getRuntime().exec(cmdLine);
- exitVal = executor.waitFor();
+ exitVal = overwriteResults(outf.getPath(), expf);
}
return exitVal;
@@ -751,26 +778,10 @@
outfd.write(tree.toStringTree());
outfd.close();
- String cmdLine = "diff " + outf.getPath() + " " + expf;
- System.out.println(cmdLine);
+ int exitVal = executeDiffCommand(outf.getPath(), expf, false);
- Process executor = Runtime.getRuntime().exec(cmdLine);
-
- StreamPrinter outPrinter = new StreamPrinter(
- executor.getInputStream(), null, SessionState.getConsole().getChildOutStream());
- StreamPrinter errPrinter = new StreamPrinter(
- executor.getErrorStream(), null, SessionState.getConsole().getChildErrStream());
-
- outPrinter.start();
- errPrinter.start();
-
- int exitVal = executor.waitFor();
-
if (exitVal != 0 && overWrite) {
- System.out.println("Overwriting results");
- cmdLine = "cp " + outf.getPath() + " " + expf;
- executor = Runtime.getRuntime().exec(cmdLine);
- exitVal = executor.waitFor();
+ exitVal = overwriteResults(outf.getPath(), expf);
}
return exitVal;
@@ -804,31 +815,10 @@
};
maskPatterns(patterns, outf.getPath());
- String[] cmdArray = new String[] {
- "diff",
- "-b",
- outf.getPath(),
- planFile
- };
- System.out.println(org.apache.commons.lang.StringUtils.join(cmdArray, ' '));
+ int exitVal = executeDiffCommand(outf.getPath(), planFile, true);
- Process executor = Runtime.getRuntime().exec(cmdArray);
-
- StreamPrinter outPrinter = new StreamPrinter(
- executor.getInputStream(), null, SessionState.getConsole().getChildOutStream());
- StreamPrinter errPrinter = new StreamPrinter(
- executor.getErrorStream(), null, SessionState.getConsole().getChildErrStream());
-
- outPrinter.start();
- errPrinter.start();
-
- int exitVal = executor.waitFor();
-
if (exitVal != 0 && overWrite) {
- System.out.println("Overwriting results");
- String cmdLine = "cp " + outf.getPath() + " " + planFile;
- executor = Runtime.getRuntime().exec(cmdLine);
- exitVal = executor.waitFor();
+ exitVal = overwriteResults(outf.getPath(), planFile);
}
return exitVal;
@@ -883,6 +873,10 @@
in = new BufferedReader(new FileReader(fname));
out = new BufferedWriter(new FileWriter(fname + ".orig"));
while (null != (line = in.readLine())) {
+ // Skip empty lines on Windows.
+ if (line.isEmpty() && Shell.WINDOWS) {
+ continue;
+ }
out.write(line);
out.write('\n');
}
@@ -954,13 +948,53 @@
"^Deleted.*",
};
maskPatterns(patterns, (new File(logDir, tname + ".out")).getPath());
+ int exitVal = executeDiffCommand((new File(logDir, tname + ".out")).getPath(),
+ outFileName, false);
- cmdArray = new String[] {
- "diff", "-a",
- (new File(logDir, tname + ".out")).getPath(),
- outFileName
- };
+ if (exitVal != 0 && overWrite) {
+ exitVal = overwriteResults((new File(logDir, tname + ".out")).getPath(), outFileName);
+ }
+ return exitVal;
+ }
+
+ private static int overwriteResults(String inFileName, String outFileName) throws Exception {
+ System.out.println("Overwriting results");
+ String[] cmdArray = new String[3];
+ cmdArray[0] = "cp";
+ cmdArray[1] = getQuotedString(inFileName);
+ cmdArray[2] = getQuotedString(outFileName);
+ Process executor = Runtime.getRuntime().exec(cmdArray);
+ return executor.waitFor();
+ }
+
+ private static int executeDiffCommand(String inFileName,
+ String outFileName,
+ boolean ignoreWhiteSpace) throws Exception {
+ ArrayList<String> diffCommandArgs = new ArrayList<String>();
+ diffCommandArgs.add("diff");
+
+ // Text file comparison
+ diffCommandArgs.add("-a");
+
+ // Ignore changes in the amount of white space
+ if(ignoreWhiteSpace || Shell.WINDOWS) {
+ diffCommandArgs.add("-b");
+ }
+
+ // Files created on Windows machines have different line endings
+ // than files created on Unix/Linux. Windows uses carriage return and line feed
+ // ("\r\n") as a line ending, whereas Unix uses just line feed ("\n").
+ // Also, StringBuilder.toString() and Stream-to-String conversions add
+ // extra spaces at the end of a line.
+ if (Shell.WINDOWS) {
+ diffCommandArgs.add("--strip-trailing-cr"); // Strip trailing carriage return on input
+ diffCommandArgs.add("-B"); // Ignore changes whose lines are all blank
+ }
+ // Add files to compare to the arguments list
+ diffCommandArgs.add(getQuotedString(inFileName));
+ diffCommandArgs.add(getQuotedString(outFileName));
+ String[] cmdArray = diffCommandArgs.toArray(new String[diffCommandArgs.size()]);
System.out.println(org.apache.commons.lang.StringUtils.join(cmdArray, ' '));
Process executor = Runtime.getRuntime().exec(cmdArray);
@@ -973,19 +1007,11 @@
outPrinter.start();
errPrinter.start();
- int exitVal = executor.waitFor();
+ return executor.waitFor();
+ }
- if (exitVal != 0 && overWrite) {
- System.out.println("Overwriting results");
- cmdArray = new String[3];
- cmdArray[0] = "cp";
- cmdArray[1] = (new File(logDir, tname + ".out")).getPath();
- cmdArray[2] = outFileName;
- executor = Runtime.getRuntime().exec(cmdArray);
- exitVal = executor.waitFor();
- }
-
- return exitVal;
+ private static String getQuotedString(String str){
+ return Shell.WINDOWS ? String.format("\"%s\"", str) : str;
}
public ASTNode parseQuery(String tname) throws Exception {
@@ -1210,4 +1236,4 @@
+ "or try \"ant test ... -Dtest.silent=false\" to get more logs.");
System.err.flush();
}
-}
+}
\ No newline at end of file
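A condensed sketch of the argument list executeDiffCommand assembles above (the standalone method shape is an assumption; the flags are the ones added by the patch):

    import java.util.ArrayList;
    import java.util.List;

    class DiffArgsSketch {
        static String[] build(String in, String out, boolean ignoreWhiteSpace, boolean windows) {
            List<String> args = new ArrayList<String>();
            args.add("diff");
            args.add("-a");                        // compare as text
            if (ignoreWhiteSpace || windows) {
                args.add("-b");                    // ignore whitespace amount changes
            }
            if (windows) {
                args.add("--strip-trailing-cr");   // tolerate CRLF line endings
                args.add("-B");                    // ignore blank-line-only changes
            }
            args.add(windows ? "\"" + in + "\"" : in);    // quote paths for cmd.exe
            args.add(windows ? "\"" + out + "\"" : out);
            return args.toArray(new String[args.size()]);
        }
    }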
Index: ql/src/test/templates/TestCliDriver.vm
===================================================================
--- ql/src/test/templates/TestCliDriver.vm (revision 1353213)
+++ ql/src/test/templates/TestCliDriver.vm (working copy)
@@ -48,7 +48,20 @@
if ("$clusterMode".equals("miniMR"))
miniMR = true;
hadoopVer = "$hadoopVersion";
+ // Escape the backslashes in the canonical path if the unit test runs
+ // on Windows. e.g. resultsDir.getCanonicalPath() returns the absolute
+ // path of a local directory; embedding it directly in the generated
+ // Java class causes a compiler error on Windows because the canonical
+ // path contains backslashes ("C:\temp\etc\"), which do not form a
+ // valid Java string literal unless the backslashes are escaped.
+#if ($windows)
+ qt = new QTestUtil("$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ hadoopVer);
+#else
qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
+#end
// do a one time initialization
qt.cleanUp();
@@ -124,7 +137,11 @@
try {
System.out.println("Begin query: " + "$fname");
+#if ($windows)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#else
qt.addFile("$qf.getCanonicalPath()");
+#end
if (qt.shouldBeSkipped("$fname")) {
return;
Index: ql/src/test/templates/TestNegativeCliDriver.vm
===================================================================
--- ql/src/test/templates/TestNegativeCliDriver.vm (revision 1353213)
+++ ql/src/test/templates/TestNegativeCliDriver.vm (working copy)
@@ -41,7 +41,20 @@
if ("$clusterMode".equals("miniMR"))
miniMR = true;
hadoopVer = "$hadoopVersion";
+ // Escape the backslashes in the canonical path if the unit test runs
+ // on Windows. e.g. resultsDir.getCanonicalPath() returns the absolute
+ // path of a local directory; embedding it directly in the generated
+ // Java class causes a compiler error on Windows because the canonical
+ // path contains backslashes ("C:\temp\etc\"), which do not form a
+ // valid Java string literal unless the backslashes are escaped.
+#if ($windows)
+ qt = new QTestUtil("$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ hadoopVer);
+#else
qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
+#end
// do a one time initialization
qt.cleanUp();
qt.createSources();
@@ -115,7 +128,11 @@
try {
System.out.println("Begin query: " + "$fname");
+#if ($windows)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#else
qt.addFile("$qf.getCanonicalPath()");
+#end
if (qt.shouldBeSkipped("$fname")) {
System.out.println("Test $fname skipped");
Index: ql/src/test/templates/TestParse.vm
===================================================================
--- ql/src/test/templates/TestParse.vm (revision 1353213)
+++ ql/src/test/templates/TestParse.vm (working copy)
@@ -37,7 +37,20 @@
if ("$clusterMode".equals("miniMR"))
miniMR = true;
String hadoopVer = "$hadoopVersion";
+ // Escape the backslashes in the canonical path if the unit test runs
+ // on Windows. e.g. resultsDir.getCanonicalPath() returns the absolute
+ // path of a local directory; embedding it directly in the generated
+ // Java class causes a compiler error on Windows because the canonical
+ // path contains backslashes ("C:\temp\etc\"), which do not form a
+ // valid Java string literal unless the backslashes are escaped.
+#if ($windows)
+ qt = new QTestUtil("$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ hadoopVer);
+#else
qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
+#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -98,7 +111,11 @@
try {
System.out.println("Begin query: " + "$fname");
+#if ($windows)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#else
qt.addFile("$qf.getCanonicalPath()");
+#end
qt.init("$fname");
ASTNode tree = qt.parseQuery("$fname");
Index: ql/src/test/templates/TestParseNegative.vm
===================================================================
--- ql/src/test/templates/TestParseNegative.vm (revision 1353213)
+++ ql/src/test/templates/TestParseNegative.vm (working copy)
@@ -37,8 +37,20 @@
if ("$clusterMode".equals("miniMR"))
miniMR = true;
String hadoopVer = "$hadoopVersion";
- qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()",
- miniMR, hadoopVer);
+ // Escape the backslashes in the canonical path if the unit test runs
+ // on Windows. e.g. resultsDir.getCanonicalPath() returns the absolute
+ // path of a local directory; embedding it directly in the generated
+ // Java class causes a compiler error on Windows because the canonical
+ // path contains backslashes ("C:\temp\etc\"), which do not form a
+ // valid Java string literal unless the backslashes are escaped.
+#if ($windows)
+ qt = new QTestUtil("$resultsDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ "$logDir.getCanonicalPath().replaceAll("\\", "\\\\")",
+ miniMR,
+ hadoopVer);
+#else
+ qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR, hadoopVer);
+#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -98,7 +110,11 @@
try {
System.out.println("Begin query: " + "$fname");
+#if ($windows)
+ qt.addFile("$qf.getCanonicalPath().replaceAll("\\", "\\\\")");
+#else
qt.addFile("$qf.getCanonicalPath()");
+#end
qt.init("$fname");
ASTNode tree = qt.parseQuery("$fname");
Index: shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java
===================================================================
--- shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java (revision 1353213)
+++ shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.Shell;
/****************************************************************
* A FileSystem that can serve a given scheme/authority using some
@@ -42,14 +43,28 @@
protected String realAuthority;
protected URI realUri;
-
+
private Path swizzleParamPath(Path p) {
- return new Path (realScheme, realAuthority, p.toUri().getPath());
+ String pathUriString = p.toUri().toString();
+ if (Shell.WINDOWS) {
+ // Some HDFS file paths (e.g. partition directories) contain '=',
+ // but Windows file paths don't support '=', so replace it with a special string.
+ pathUriString = pathUriString.replaceAll("=", "------");
+ }
+ URI newPathUri = URI.create(pathUriString);
+ return new Path (realScheme, realAuthority, newPathUri.getPath());
}
private Path swizzleReturnPath(Path p) {
- return new Path (myScheme, myAuthority, p.toUri().getPath());
+ String pathUriString = p.toUri().toString();
+ if (Shell.WINDOWS) {
+ // Convert the special string '------' back to '=' when doing the
+ // reverse conversion from a Windows path back to an HDFS path.
+ pathUriString = pathUriString.replaceAll("------", "=");
+ }
+ URI newPathUri = URI.create(pathUriString);
+ return new Path (myScheme, myAuthority, newPathUri.getPath());
}
private FileStatus swizzleFileStatus(FileStatus orig, boolean isParam) {
@@ -66,14 +81,14 @@
public ProxyFileSystem() {
throw new RuntimeException ("Unsupported constructor");
}
-
+
public ProxyFileSystem(FileSystem fs) {
throw new RuntimeException ("Unsupported constructor");
}
/**
* Create a proxy file system for fs.
- *
+ *
* @param fs FileSystem to create proxy for
* @param myUri URI to use as proxy. Only the scheme and authority from
* this are used right now
@@ -158,7 +173,7 @@
public boolean rename(Path src, Path dst) throws IOException {
return super.rename(swizzleParamPath(src), swizzleParamPath(dst));
}
-
+
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
return super.delete(swizzleParamPath(f), recursive);
@@ -167,8 +182,8 @@
@Override
public boolean deleteOnExit(Path f) throws IOException {
return super.deleteOnExit(swizzleParamPath(f));
- }
-
+ }
+
@Override
public FileStatus[] listStatus(Path f) throws IOException {
FileStatus[] orig = super.listStatus(swizzleParamPath(f));
@@ -178,7 +193,7 @@
}
return ret;
}
-
+
@Override
public Path getHomeDirectory() {
return swizzleReturnPath(super.getHomeDirectory());
@@ -188,12 +203,12 @@
public void setWorkingDirectory(Path newDir) {
super.setWorkingDirectory(swizzleParamPath(newDir));
}
-
+
@Override
public Path getWorkingDirectory() {
return swizzleReturnPath(super.getWorkingDirectory());
}
-
+
@Override
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
return super.mkdirs(swizzleParamPath(f), permission);
@@ -206,14 +221,14 @@
}
@Override
- public void copyFromLocalFile(boolean delSrc, boolean overwrite,
+ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
Path[] srcs, Path dst)
throws IOException {
super.copyFromLocalFile(delSrc, overwrite, srcs, swizzleParamPath(dst));
}
-
+
@Override
- public void copyFromLocalFile(boolean delSrc, boolean overwrite,
+ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
Path src, Path dst)
throws IOException {
super.copyFromLocalFile(delSrc, overwrite, src, swizzleParamPath(dst));
@@ -251,7 +266,7 @@
public FileChecksum getFileChecksum(Path f) throws IOException {
return super.getFileChecksum(swizzleParamPath(f));
}
-
+
@Override
public void setOwner(Path p, String username, String groupname
) throws IOException {
@@ -270,4 +285,4 @@
super.setPermission(swizzleParamPath(p), permission);
}
}
-
+
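A round-trip sketch of the '=' escaping performed by the two swizzle methods above, using a hypothetical partition path ('------' is the marker chosen by the patch):

    class EqualsEscapeSketch {
        public static void main(String[] args) {
            String hdfs = "/warehouse/srcpart/ds=2008-04-08/hr=11";
            String windowsSafe = hdfs.replaceAll("=", "------");      // swizzleParamPath direction
            String restored = windowsSafe.replaceAll("------", "=");  // swizzleReturnPath direction
            System.out.println(windowsSafe);
            System.out.println(restored.equals(hdfs));  // true
        }
    }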
Index: shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
===================================================================
--- shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java (revision 1353213)
+++ shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java (working copy)
@@ -18,19 +18,17 @@
package org.apache.hadoop.fs;
-import java.io.*;
+import java.io.IOException;
import java.net.URI;
-import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.Shell;
/****************************************************************
* A Proxy for LocalFileSystem
*
* Serves URIs corresponding to the 'pfile:///' namespace using
- * a LocalFileSystem
+ * a LocalFileSystem
*****************************************************************/
public class ProxyLocalFileSystem extends FilterFileSystem {
@@ -50,10 +48,21 @@
// create a proxy for the local filesystem
// the scheme/authority serving as the proxy is derived
// from the supplied URI
+ String scheme = name.getScheme();
+ String nameUriString = name.toString();
+ if (Shell.WINDOWS) {
+ // Replace the encoded backslash ("%5C") with a forward slash,
+ // remove the Windows drive letter, and
+ // replace '=' with the special string '------' to handle the
+ // unsupported '=' character on Windows.
+ nameUriString = nameUriString.replaceAll("%5C", "/")
+ .replaceFirst("/[c-zC-Z]:", "/")
+ .replaceFirst("^[c-zC-Z]:", "")
+ .replaceAll("=", "------");
+ name = URI.create(nameUriString);
+ }
- String scheme = name.getScheme();
String authority = name.getAuthority() != null ? name.getAuthority() : "";
- String proxyUriString = name + "://" + authority + "/";
+ String proxyUriString = nameUriString + "://" + authority + "/";
fs = new ProxyFileSystem(localFs, URI.create(proxyUriString));
fs.initialize(name, conf);
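A sketch of the URI normalization chain above on a hypothetical input, applying each replacement from the patch in order:

    import java.net.URI;

    class UriNormalizeSketch {
        public static void main(String[] args) {
            String raw = "pfile:/C:%5Cbuild%5Cql/test/data=1";   // hypothetical Windows URI
            String fixed = raw.replaceAll("%5C", "/")            // decode escaped backslashes
                .replaceFirst("/[c-zC-Z]:", "/")                 // drop a "/C:" drive prefix
                .replaceFirst("^[c-zC-Z]:", "")                  // drop a leading "C:" prefix
                .replaceAll("=", "------");                      // escape '=' for Windows
            System.out.println(URI.create(fixed));               // pfile:/build/ql/test/data------1
        }
    }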
Index: testutils/hadoop.cmd
===================================================================
--- testutils/hadoop.cmd (revision 0)
+++ testutils/hadoop.cmd (working copy)
@@ -0,0 +1,253 @@
+@echo off
+@rem Licensed to the Apache Software Foundation (ASF) under one or more
+@rem contributor license agreements. See the NOTICE file distributed with
+@rem this work for additional information regarding copyright ownership.
+@rem The ASF licenses this file to You under the Apache License, Version 2.0
+@rem (the "License"); you may not use this file except in compliance with
+@rem the License. You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+
+
+@rem The Hadoop command script
+@rem
+@rem Environment Variables
+@rem
+@rem JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
+@rem
+@rem HADOOP_CLASSPATH Extra Java CLASSPATH entries.
+@rem
+@rem HADOOP_HEAPSIZE The maximum amount of heap to use, in MB.
+@rem Default is 1000.
+@rem
+@rem HADOOP_OPTS Extra Java runtime options.
+@rem
+@rem HADOOP_NAMENODE_OPTS These options are added to HADOOP_OPTS
+@rem HADOOP_CLIENT_OPTS when the respective command is run.
+@rem HADOOP_{COMMAND}_OPTS etc HADOOP_JT_OPTS applies to JobTracker
+@rem for e.g. HADOOP_CLIENT_OPTS applies to
+@rem more than one command (fs, dfs, fsck,
+@rem dfsadmin etc)
+@rem
+@rem HADOOP_CONF_DIR Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+@rem
+@rem HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
+@rem
+
+if not defined HADOOP_BIN_PATH (
+ set HADOOP_BIN_PATH=%~dp0
+)
+
+if "%HADOOP_BIN_PATH:~-1%" == "\" (
+ set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
+)
+call :updatepath %HADOOP_BIN_PATH%
+
+set BIN=%~dp0
+for %%i in (%BIN%.) do (
+ set BIN=%%~dpi
+)
+if "%BIN:~-1%" == "\" (
+ set BIN=%BIN:~0,-1%
+)
+
+
+@rem
+@rem setup java environment variables
+@rem
+
+if not defined JAVA_HOME (
+ echo Error: JAVA_HOME is not set.
+ goto :eof
+)
+
+if not exist %JAVA_HOME%\bin\java.exe (
+ echo Error: JAVA_HOME is incorrectly set.
+ goto :eof
+)
+
+set JAVA=%JAVA_HOME%\bin\java
+set JAVA_HEAP_MAX=-Xmx1000m
+
+@rem
+@rem check envvars which might override default args
+@rem
+
+if defined HADOOP_HEAPSIZE (
+ set JAVA_HEAP_MAX=-Xmx%HADOOP_HEAPSIZE%m
+)
+
+@rem
+@rem CLASSPATH initially contains %HADOOP_CONF_DIR%
+@rem
+
+set CLASSPATH=%HADOOP_CONF_DIR%
+set CLASSPATH=%CLASSPATH%;%JAVA_HOME%\lib\tools.jar
+
+
+set BUILD_ROOT="%BIN%"/build
+
+
+if not defined HIVE_HADOOP_TEST_CLASSPATH (
+ @echo Error: HIVE_HADOOP_TEST_CLASSPATH not defined.
+ goto :eof
+)
+
+
+
+set CLASSPATH=%CLASSPATH%;%HIVE_HADOOP_TEST_CLASSPATH%
+if not exist %BUILD_ROOT%/test/hadoop/logs (
+ mkdir %BUILD_ROOT%/test/hadoop/logs
+)
+
+@rem
+@rem add user-specified CLASSPATH last
+@rem
+
+if defined HADOOP_CLASSPATH (
+ set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%
+)
+
+if not defined HADOOP_LOG_DIR (
+ set HADOOP_LOG_DIR=%BUILD_ROOT%\logs
+)
+
+if not defined HADOOP_LOGFILE (
+ set HADOOP_LOGFILE=hadoop.log
+)
+
+if not defined HADOOP_ROOT_LOGGER (
+ set HADOOP_ROOT_LOGGER=INFO,console,DRFA
+)
+
+@rem
+@rem default policy file for service-level authorization
+@rem
+
+if not defined HADOOP_POLICYFILE (
+ set HADOOP_POLICYFILE=hadoop-policy.xml
+)
+set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.dir=%HADOOP_LOG_DIR%
+set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.file=%HADOOP_LOGFILE%
+set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.root.logger=%HADOOP_ROOT_LOGGER%
+
+if defined HADOOP_PREFIX (
+ set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.home.dir=%HADOOP_PREFIX%
+)
+
+if defined HADOOP_IDENT_STRING (
+ set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.id.str=%HADOOP_IDENT_STRING%
+)
+
+if defined JAVA_LIBRARY_PATH (
+ set HADOOP_OPTS=%HADOOP_OPTS% -Djava.library.path=%JAVA_LIBRARY_PATH%
+)
+set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.policy.file=%HADOOP_POLICYFILE%
+
+@rem Disable ipv6 as it can cause issues
+set HADOOP_OPTS=%HADOOP_OPTS% -Djava.net.preferIPv4Stack=true
+
+:main
+ setlocal enabledelayedexpansion
+
+ set hadoop-command=%1
+ if not defined hadoop-command (
+ goto print_usage
+ )
+
+ call :make_command_arguments %*
+ set corecommands=fs version jar distcp daemonlog archive
+ for %%i in ( %corecommands% ) do (
+ if %hadoop-command% == %%i set corecommand=true
+ )
+ if defined corecommand (
+ call :%hadoop-command%
+ ) else (
+ set CLASSPATH=%CLASSPATH%;%CD%
+ set CLASS=%hadoop-command%
+ )
+ set path=%HADOOP_BIN_PATH%;%windir%\system32;%windir%
+ call %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hadoop-command-arguments%
+
+ goto :eof
+
+:version
+ set CLASS=org.apache.hadoop.util.VersionInfo
+ set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
+ goto :eof
+
+:jar
+ set CLASS=org.apache.hadoop.util.RunJar
+ goto :eof
+
+:distcp
+ set CLASS=org.apache.hadoop.tools.DistCp
+ set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
+ set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
+ goto :eof
+
+:daemonlog
+ set CLASS=org.apache.hadoop.log.LogLevel
+ set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
+ goto :eof
+
+:archive
+ set CLASS=org.apache.hadoop.tools.HadoopArchives
+ set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
+ set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
+ goto :eof
+
+:updatepath
+ set path_to_add=%*
+ set current_path_comparable=%path:(x86)=%
+ set current_path_comparable=%current_path_comparable: =_%
+ set path_to_add_comparable=%path_to_add:(x86)=%
+ set path_to_add_comparable=%path_to_add_comparable: =_%
+ for %%i in ( %current_path_comparable% ) do (
+ if /i "%%i" == "%path_to_add_comparable%" (
+ set path_to_add_exist=true
+ )
+ )
+ set system_path_comparable=
+ set path_to_add_comparable=
+ if not defined path_to_add_exist path=%path_to_add%;%path%
+ set path_to_add=
+ goto :eof
+
+:make_command_arguments
+ if "%2" == "" goto :eof
+ set _count=0
+ set _shift=1
+ for %%i in (%*) do (
+ set /a _count=!_count!+1
+ if !_count! GTR %_shift% (
+ if not defined _arguments (
+ set _arguments=%%i
+ ) else (
+ set _arguments=!_arguments! %%i
+ )
+ )
+ )
+
+ set hadoop-command-arguments=%_arguments%
+ goto :eof
+
+:print_usage
+ @echo Usage: hadoop COMMAND
+ @echo where COMMAND is one of:
+ @echo fs run a generic filesystem user client
+ @echo version print the version
+ @echo jar ^<jar^> run a jar file
+ @echo.
+ @echo distcp ^<srcurl^> ^<desturl^> copy file or directories recursively
+ @echo archive -archiveName NAME ^<src^>* ^<dest^> create a hadoop archive
+ @echo daemonlog get/set the log level for each daemon
+ @echo Most commands print help when invoked w/o parameters.
+
+endlocal