diff --git ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java
new file mode 100644
index 0000000..9cb96e7
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.Shell;
+
+public class WindowsPathUtil {
+
+  public static void convertPathsFromWindowsToHdfs(HiveConf conf){
+    if(Shell.WINDOWS){
+      String orgWarehouseDir = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
+      conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, getHdfsUriString(orgWarehouseDir));
+
+      String orgTestTempDir = System.getProperty("test.tmp.dir");
+      System.setProperty("test.tmp.dir", getHdfsUriString(orgTestTempDir));
+
+      String orgTestDataDir = System.getProperty("test.src.data.dir");
+      System.setProperty("test.src.data.dir", getHdfsUriString(orgTestDataDir));
+
+      String orgScratchDir = conf.getVar(HiveConf.ConfVars.SCRATCHDIR);
+      conf.setVar(HiveConf.ConfVars.SCRATCHDIR, getHdfsUriString(orgScratchDir));
+    }
+  }
+
+  private static String getHdfsUriString(String uriStr) {
+    assert uriStr != null;
+    if(Shell.WINDOWS) {
+      // If the URI conversion is from Windows to HDFS then replace the '\' with '/'
+      // and remove the windows single drive letter & colon from absolute path.
+      return uriStr.replace('\\', '/')
+          .replaceFirst("/[c-zC-Z]:", "/")
+          .replaceFirst("^[c-zC-Z]:", "");
+    }
+    return uriStr;
+  }
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 627f084..5931ea5 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -71,6 +72,9 @@ static {
     try {
       conf = new HiveConf(ExecDriver.class);
+
+      //convert possible incompatible Windows path in config
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
 
       fs = FileSystem.get(conf);
 
       if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
index 6490b89..fb2284d 100644
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.thrift.TException;
@@ -60,6 +61,7 @@ protected void setUp() throws Exception {
     super.setUp();
     hive = Hive.get();
+    WindowsPathUtil.convertPathsFromWindowsToHdfs(hive.getConf());
     checker = new HiveMetaStoreChecker(hive);
     partCols = new ArrayList<FieldSchema>();
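Note (not part of the patch): the standalone sketch below re-implements the replace/replaceFirst chain from getHdfsUriString, minus the Shell.WINDOWS guard, so its effect on a Windows-style path can be checked on any platform. The class name and sample paths are illustrative assumptions only.

// Illustrative sketch, not patch content: mirrors the conversion logic of
// WindowsPathUtil.getHdfsUriString for demonstration purposes.
public class WindowsPathConversionDemo {

  private static String toHdfsUriString(String uriStr) {
    // Backslashes become forward slashes, then a single leading drive letter
    // and colon ("D:" or "/D:") is dropped so the result is an HDFS-style path.
    return uriStr.replace('\\', '/')
        .replaceFirst("/[c-zC-Z]:", "/")
        .replaceFirst("^[c-zC-Z]:", "");
  }

  public static void main(String[] args) {
    // Hypothetical sample paths and their converted forms.
    System.out.println(toHdfsUriString("D:\\hive\\build\\test\\tmp")); // /hive/build/test/tmp
    System.out.println(toHdfsUriString("C:/hadoop/scratchdir"));       // /hadoop/scratchdir
  }
}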