diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
index 80f3a12..d5cedb1 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.conf;
import java.io.BufferedReader;
+import java.io.File;
import java.io.InputStreamReader;
import junit.framework.TestCase;
@@ -75,31 +76,38 @@ private void getCmdOutput(String logFile) throws Exception {
assertEquals(true, logCreated);
}
- private void RunTest(String cleanCmd, String findCmd, String logFile,
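+ /** Deletes the test log file and, if then empty, its parent directory. */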
+ private void cleanLog(File logFile) {
+ if (logFile.exists()) {
+ logFile.delete();
+ }
+ File logFileDir = logFile.getParentFile();
+ if (logFileDir.exists()) {
+ logFileDir.delete();
+ }
+ }
+
+ private void runTest(File logFile,
String hiveLog4jProperty, String hiveExecLog4jProperty) throws Exception {
// clean test space
- runCmd(cleanCmd);
+ cleanLog(logFile);
+ assertFalse(logFile + " should not exist", logFile.exists());
// config log4j with customized files
// check whether HiveConf initialize log4j correctly
configLog(hiveLog4jProperty, hiveExecLog4jProperty);
// check whether log file is created on test running
- runCmd(findCmd);
- getCmdOutput(logFile);
-
- // clean test space
- runCmd(cleanCmd);
+ assertTrue(logFile + " should exist", logFile.exists());
}
public void testHiveLogging() throws Exception {
- // customized log4j config log file to be: /tmp/TestHiveLogging/hiveLog4jTest.log
- String customLogPath = "/tmp/" + System.getProperty("user.name") + "-TestHiveLogging/";
+ // customized log4j config log file to be: ${test.tmp.dir}/${user.name}-TestHiveLogging/hiveLog4jTest.log
+ File customLogPath = new File(new File(System.getProperty("test.tmp.dir")),
+ System.getProperty("user.name") + "-TestHiveLogging");
String customLogName = "hiveLog4jTest.log";
- String customLogFile = customLogPath + customLogName;
- String customCleanCmd = "rm -rf " + customLogFile;
- String customFindCmd = "find " + customLogPath + " -name " + customLogName;
- RunTest(customCleanCmd, customFindCmd, customLogFile,
+ File customLogFile = new File(customLogPath, customLogName);
+ runTest(customLogFile,
"hive-log4j-test.properties", "hive-exec-log4j-test.properties");
}
}
diff --git common/src/test/resources/hive-exec-log4j-test.properties common/src/test/resources/hive-exec-log4j-test.properties
index 839a9ca..1e53f26 100644
--- common/src/test/resources/hive-exec-log4j-test.properties
+++ common/src/test/resources/hive-exec-log4j-test.properties
@@ -1,6 +1,6 @@
# Define some default values that can be overridden by system properties
hive.root.logger=INFO,FA
-hive.log.dir=/tmp/${user.name}-TestHiveLogging
+hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging
hive.log.file=hiveExecLog4jTest.log
# Define the root logger to the system property "hadoop.root.logger".
diff --git common/src/test/resources/hive-log4j-test.properties common/src/test/resources/hive-log4j-test.properties
index 51acda2..0348325 100644
--- common/src/test/resources/hive-log4j-test.properties
+++ common/src/test/resources/hive-log4j-test.properties
@@ -1,6 +1,6 @@
# Define some default values that can be overridden by system properties
hive.root.logger=WARN,DRFA
-hive.log.dir=/tmp/${user.name}-TestHiveLogging
+hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging
hive.log.file=hiveLog4jTest.log
# Define the root logger to the system property "hadoop.root.logger".
diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
index 8868623..1606982 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
@@ -64,7 +64,8 @@ public void testAlterTablePass() throws IOException, CommandNeedRetryException {
String tmpDir = System.getProperty("test.tmp.dir");
File dir = new File(tmpDir + "/hive-junit-" + System.nanoTime());
- response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '" + dir.getAbsolutePath() + "'");
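+ // Use the URI path form so the partition location is a valid forward-slash path on Windows too.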
+ response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '" + dir.toURI().getPath() + "'");
assertEquals(0, response.getResponseCode());
assertNull(response.getErrorMessage());
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java
index 7162584..dfde522 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java
@@ -64,7 +64,7 @@ public void testReadWrite() throws Exception {
TestHCatLoader.executeStatementOnDriver("create external table " + tblName +
" (my_small_int smallint, my_tiny_int tinyint)" +
" row format delimited fields terminated by '\t' stored as textfile location '" +
- dataDir + "'", driver);
+ dataDir.toURI().getPath() + "'", driver);
TestHCatLoader.dropTable(tblName2, driver);
TestHCatLoader.createTable(tblName2, "my_small_int smallint, my_tiny_int tinyint", null, driver,
"textfile");
diff --git hcatalog/webhcat/java-client/pom.xml hcatalog/webhcat/java-client/pom.xml
index ebef9f1..48f63e4 100644
--- hcatalog/webhcat/java-client/pom.xml
+++ hcatalog/webhcat/java-client/pom.xml
@@ -47,6 +47,13 @@
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
    <dependency>
      <groupId>org.apache.hive.hcatalog</groupId>
      <artifactId>hive-hcatalog-core</artifactId>
      <version>${project.version}</version>
diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
index bc90ffe..6f5bff8 100644
--- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
+++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
@@ -31,6 +31,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
@@ -107,13 +108,18 @@ public static void tearDown() throws Exception {
@BeforeClass
public static void startMetaStoreServer() throws Exception {
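+ // Create the conf before starting the metastore thread so Windows paths are converted first.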
+ hcatConf = new HiveConf(TestHCatClient.class);
+ if (Shell.WINDOWS) {
+ WindowsPathUtil.convertPathsFromWindowsToHdfs(hcatConf);
+ }
+
Thread t = new Thread(new RunMS(msPort));
t.start();
Thread.sleep(10000);
securityManager = System.getSecurityManager();
System.setSecurityManager(new NoExitSecurityManager());
- hcatConf = new HiveConf(TestHCatClient.class);
hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:"
+ msPort);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
diff --git itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
index f9b698e..98e0fe8 100644
--- itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
+++ itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
@@ -81,7 +81,8 @@ public void testPassword() throws Exception {
conf.set("hadoop.security.credential.clear-text-fallback", "true");
// Set up CredentialProvider
- conf.set("hadoop.security.credential.provider.path", "jceks://file/" + tmpDir + "/test.jks");
+ conf.set("hadoop.security.credential.provider.path", "jceks://file/" + tmpDir.toURI().getPath() + "/test.jks");
// CredentialProvider/CredentialProviderFactory may not exist, depending on the version of
// hadoop-2 being used to build Hive. Use reflection to do the following lines
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index 95f1c39..303e306 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -140,7 +140,7 @@ public void testStatsAfterCompactionPartTbl() throws Exception {
executeStatementOnDriver("CREATE EXTERNAL TABLE " + tblNameStg + "(a INT, b STRING)" +
" ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t' LINES TERMINATED BY '\\n'" +
" STORED AS TEXTFILE" +
- " LOCATION '" + stagingFolder.newFolder() + "'", driver);
+ " LOCATION '" + stagingFolder.newFolder().toURI().getPath() + "'", driver);
executeStatementOnDriver("load data local inpath '" + BASIC_FILE_NAME +
"' overwrite into table " + tblNameStg, driver);
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 6a18b9a..47a462a 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -1805,6 +1805,8 @@ public void testDuplicateColumnNameOrder() throws SQLException {
ResultSet rs = stmt.executeQuery("SELECT 1 AS a, 2 AS a from " + tableName);
assertTrue(rs.next());
assertEquals(1, rs.getInt("a"));
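+ // Explicitly close the result set instead of relying on cleanup when the statement closes.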
+ rs.close();
}
diff --git serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
index c6b5cb6..cd5a0fa 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
@@ -80,6 +80,8 @@
private TypeInfoToSchema typeInfoToSchema;
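+ // Resource files may be checked out with CRLF endings on Windows, so strip the platform separator rather than "\n".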
+ private final String lineSeparator = System.getProperty("line.separator");
private String getAvroSchemaString(TypeInfo columnType) {
return typeInfoToSchema.convert(
@@ -383,7 +384,7 @@ public void createAvroStructSchema() throws IOException {
LOGGER.info("structTypeInfo is " + structTypeInfo);
final String specificSchema = IOUtils.toString(Resources.getResource("avro-struct.avsc")
- .openStream()).replace("\n", "");
+ .openStream()).replace(lineSeparator, "");
String expectedSchema = genSchema(
specificSchema);
@@ -414,7 +415,7 @@ public void createAvroNestedStructSchema() throws IOException {
superStructTypeInfo.setAllStructFieldTypeInfos(superTypeInfos);
final String specificSchema = IOUtils.toString(Resources.getResource("avro-nested-struct.avsc")
- .openStream()).replace("\n", "");
+ .openStream()).replace(lineSeparator, "");
String expectedSchema = genSchema(
specificSchema);
Assert.assertEquals("Test for nested struct's avro schema failed",