diff --git common/src/java/org/apache/hadoop/hive/common/FileUtils.java common/src/java/org/apache/hadoop/hive/common/FileUtils.java index f71bc3c..a9ec73c 100644 --- common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -24,7 +24,6 @@ import java.net.URISyntaxException; import java.security.AccessControlException; import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; import java.util.BitSet; import java.util.List; @@ -34,19 +33,16 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Shell; - /** * Collection of file manipulation utilities common across Hive. 
@@ -229,7 +225,7 @@ public static String makeListBucketingDirName(List lbCols, List charToEscape.set(c); } - if(Shell.WINDOWS){ + if (Environments.WINDOWS) { //On windows, following chars need to be escaped as well char [] winClist = {' ', '<','>','|'}; for (char c : winClist) { diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 7f4afd9..eecd133 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -44,10 +44,10 @@ import org.apache.hadoop.hive.conf.Validator.PatternSet; import org.apache.hadoop.hive.conf.Validator.RangeValidator; import org.apache.hadoop.hive.conf.Validator.StringSet; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Shell; import org.apache.hive.common.HiveCompat; /** @@ -1859,7 +1859,7 @@ private static String findHadoopBinary() { val = (val == null ? File.separator + "usr" : val) + File.separator + "bin" + File.separator + "hadoop"; // Launch hadoop command file on windows. - return val + (Shell.WINDOWS ? ".cmd" : ""); + return val + (Environments.WINDOWS ? 
".cmd" : ""); } public String getDefaultValue() { diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java index fce46de..4abc585 100644 --- common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.conf; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.HiveTestUtils; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,7 @@ public void testHiveSitePath() throws Exception { String expectedPath = HiveTestUtils.getFileFromClasspath("hive-site.xml"); String hiveSiteLocation = HiveConf.getHiveSiteLocation().getPath(); - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { // Do case-insensitive comparison on Windows, as drive letter can have different case. expectedPath = expectedPath.toLowerCase(); hiveSiteLocation = hiveSiteLocation.toLowerCase(); diff --git contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java index 5fedb3d..764f42d 100644 --- contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java +++ contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java @@ -24,7 +24,7 @@ import junit.framework.TestCase; -import org.apache.hadoop.util.Shell; +import org.apache.hadoop.hive.shims.Environments; /** * TestGenericMR. @@ -138,6 +138,6 @@ public void reduce(String key, Iterator records, Output output) private static String getOsSpecificOutput(String outStr){ assert outStr != null; - return Shell.WINDOWS ? outStr.replaceAll("\\r", "") : outStr; + return Environments.WINDOWS ? 
outStr.replaceAll("\\r", "") : outStr; } } diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java index be7134f..4e08762 100644 --- hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java +++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; @@ -53,7 +54,6 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.util.Shell; import org.apache.hive.hcatalog.NoExitSecurityManager; import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; import org.apache.hive.hcatalog.data.DefaultHCatRecord; @@ -160,7 +160,7 @@ public void testPartitionPublish() throws Exception { Assert.assertTrue(table != null); // In Windows, we cannot remove the output directory when job fail. 
See // FileOutputCommitterContainer.abortJob - if (!Shell.WINDOWS) { + if (!Environments.WINDOWS) { Path path = new Path(table.getSd().getLocation() + "/part1=p1value1/part0=p0value1"); Assert.assertFalse(path.getFileSystem(conf).exists(path)); diff --git hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index f9f7b04..c198498 100644 --- hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; import org.apache.hive.hcatalog.common.HCatConstants; @@ -60,8 +61,6 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertArrayEquals; -import org.apache.hadoop.util.Shell; - public class TestHCatClient { private static final Logger LOG = LoggerFactory.getLogger(TestHCatClient.class); private static final String msPort = "20101"; @@ -127,7 +126,7 @@ public static void startMetaStoreServer() throws Exception { System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); } public static String fixPath(String path) { - if(!Shell.WINDOWS) { + if (!Environments.WINDOWS) { return path; } String expectedDir = path.replaceAll("\\\\", "/"); diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java index 5b4fd9b..f489231 100644 --- 
hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java @@ -39,7 +39,7 @@ import org.apache.commons.exec.PumpStreamHandler; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.Shell; +import org.apache.hadoop.hive.shims.Environments; class StreamOutputWriter extends Thread { @@ -175,7 +175,7 @@ private ExecBean auxRun(String program, List args, Map e LOG.info("Running: " + cmd); ExecBean res = new ExecBean(); - if(Shell.WINDOWS){ + if (Environments.WINDOWS) { //The default executor is sometimes causing failure on windows. hcat // command sometimes returns non zero exit status with it. It seems // to hit some race conditions on windows. diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java index fb9d767..0dd70fe 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.classification.InterfaceAudience; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim; import org.apache.hadoop.io.NullWritable; @@ -31,7 +32,6 @@ import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.apache.hive.hcatalog.templeton.BadParam; import org.apache.hive.hcatalog.templeton.LauncherDelegator; @@ -166,7 +166,7 @@ 
private static void handleTokenFile(List jarArgsList, String tokenPlaceH //Token is available, so replace the placeholder tokenFile = tokenFile.replaceAll("\"", ""); String tokenArg = tokenProperty + "=" + tokenFile; - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { try { tokenArg = TempletonUtils.quoteForWindows(tokenArg); } catch (BadParam e) { diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java index d2dec54..79c7f7b 100644 --- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java +++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java @@ -41,8 +41,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.apache.hive.hcatalog.templeton.UgiFactory; import org.apache.hive.hcatalog.templeton.BadParam; @@ -316,7 +316,7 @@ public static int fetchUrl(URL url) // equal sign might be lost as part of the cmd script processing if not // under quotes). 
public static String quoteForWindows(String param) throws BadParam { - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { if (param != null && param.length() > 0) { String nonQuotedPart = param; boolean addQuotes = true; @@ -342,7 +342,7 @@ public static String quoteForWindows(String param) throws BadParam { } public static void addCmdForWindows(ArrayList args) { - if(Shell.WINDOWS){ + if (Environments.WINDOWS) { args.add("cmd"); args.add("/c"); args.add("call"); diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index af4a3e5..af661c2 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -43,7 +43,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Deque; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; @@ -75,8 +74,6 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.exec.vector.util.AllVectorTypesRecord; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.Table; @@ -87,17 +84,10 @@ import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer; -import org.apache.hadoop.hive.serde2.thrift.test.Complex; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.ShimLoader; -import 
org.apache.hadoop.mapred.SequenceFileInputFormat; -import org.apache.hadoop.mapred.SequenceFileOutputFormat; -import org.apache.hadoop.mapred.TextInputFormat; -import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.StreamPrinter; -import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.tools.ant.BuildException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; @@ -282,7 +272,7 @@ public void initConf() throws Exception { // Windows paths should be converted after MiniMrShim.setupConfiguration() // since setupConfiguration may overwrite configuration values. - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { WindowsPathUtil.convertPathsFromWindowsToHdfs(conf); } } @@ -526,14 +516,13 @@ private boolean checkOSExclude(String fileName, String query){ String prefix = matcher.group(1); if ("EX".equals(prefix)) { //windows is to be exluded - if(Shell.WINDOWS){ + if (Environments.WINDOWS) { System.out.println("Due to the OS being windows " + "adding the query " + fileName + " to the set of tests to skip"); return true; } - } - else if(!Shell.WINDOWS){ + } else if (!Environments.WINDOWS) { //non windows to be exluded System.out.println("Due to the OS not being windows " + "adding the query " + fileName + @@ -1268,7 +1257,7 @@ private static int executeDiffCommand(String inFileName, diffCommandArgs.add("-a"); // Ignore changes in the amount of white space - if (ignoreWhiteSpace || Shell.WINDOWS) { + if (ignoreWhiteSpace || Environments.WINDOWS) { diffCommandArgs.add("-b"); } @@ -1277,7 +1266,7 @@ private static int executeDiffCommand(String inFileName, // ("\r\n") as a line ending, whereas Unix uses just line feed ("\n"). // Also StringBuilder.toString(), Stream to String conversions adds extra // spaces at the end of the line. 
- if (Shell.WINDOWS) { + if (Environments.WINDOWS) { diffCommandArgs.add("--strip-trailing-cr"); // Strip trailing carriage return on input diffCommandArgs.add("-B"); // Ignore changes whose lines are all blank } @@ -1350,7 +1339,7 @@ private static int executeCmd(String[] args, String outFile, String errFile) thr } private static String getQuotedString(String str){ - return Shell.WINDOWS ? String.format("\"%s\"", str) : str; + return Environments.WINDOWS ? String.format("\"%s\"", str) : str; } public ASTNode parseQuery(String tname) throws Exception { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java index 5b857e2..f9f7aa0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java @@ -43,12 +43,12 @@ import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.Reporter; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; /** @@ -213,7 +213,7 @@ public File getAbsolutePath(String filename) { if (f.isFile() && f.canRead()) { return f; } - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { // Try filename with executable extentions String[] exts = new String[] {".exe", ".bat"}; for (String ext : exts) { @@ -261,7 +261,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { } boolean isBrokenPipeException(IOException e) { - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { String errMsg = e.getMessage(); return errMsg.equalsIgnoreCase(IO_EXCEPTION_PIPE_CLOSED_WIN) || 
errMsg.equalsIgnoreCase(IO_EXCEPTION_PIPE_ENDED_WIN); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 76fee61..2de8997 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -159,6 +159,7 @@ import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.SequenceFile.CompressionType; @@ -177,7 +178,6 @@ import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.util.Shell; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.Input; @@ -1203,7 +1203,7 @@ public static StreamStatus readColumn(DataInput in, OutputStream out) throws IOE // Default new line characters on windows are "CRLF" so detect if there are any windows // native newline characters and handle them. - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { // if the CR is not followed by the LF on windows then add it back to the stream and // proceed with next characters in the input stream. 
if (foundCrChar && b != Utilities.newLineCode) { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java index c3a83d4..87e266a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.io; import java.io.IOException; -import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -35,12 +34,12 @@ import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PartitionDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; @@ -52,7 +51,6 @@ import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.ReflectionUtils; /** @@ -420,7 +418,7 @@ private static String getMatchingPath(Map> pathToAlias } String dirPath = dir.toUri().getPath(); - if(Shell.WINDOWS){ + if (Environments.WINDOWS) { //temp hack //do this to get rid of "/" before the drive letter in windows dirPath = new Path(dirPath).toString(); diff --git ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java index 294a3dd..85dda52 100644 --- 
ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/WindowsPathUtil.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.ql; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.util.Shell; +import org.apache.hadoop.hive.shims.Environments; public class WindowsPathUtil { @@ -45,7 +45,7 @@ public static void convertPathsFromWindowsToHdfs(HiveConf conf){ public static String getHdfsUriString(String uriStr) { assert uriStr != null; - if(Shell.WINDOWS) { + if (Environments.WINDOWS) { // If the URI conversion is from Windows to HDFS then replace the '\' with '/' // and remove the windows single drive letter & colon from absolute path. return uriStr.replace('\\', '/') diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 63ecb8d..8ef74bd 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -59,8 +59,8 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.mapred.TextInputFormat; -import org.apache.hadoop.util.Shell; /** * Mimics the actual query compiler in generating end to end plans and testing @@ -86,7 +86,7 @@ SessionState.start(conf); //convert possible incompatible Windows path in config - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { WindowsPathUtil.convertPathsFromWindowsToHdfs(conf); } tmpdir = System.getProperty("test.tmp.dir"); @@ -173,7 +173,7 @@ private static void fileDiff(String datafile, String testdir) throws Exception { } FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath()); - boolean ignoreWhitespace = Shell.WINDOWS; + boolean ignoreWhitespace = Environments.WINDOWS; if 
(!Utilities.contentsEqual(fi_gold, fi_test, ignoreWhitespace)) { LOG.error(di_test.toString() + " does not match " + datafile); assertEquals(false, true); diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java index 5b8ec60..cd085aa 100644 --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java +++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java @@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.WindowsPathUtil; import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.mapred.TextInputFormat; -import org.apache.hadoop.util.Shell; import org.apache.thrift.TException; /** @@ -63,7 +63,7 @@ protected void setUp() throws Exception { super.setUp(); hive = Hive.get(); - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { WindowsPathUtil.convertPathsFromWindowsToHdfs(hive.getConf()); } checker = new HiveMetaStoreChecker(hive); diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index c380b69..facefd4 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -20,9 +20,9 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Shell; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes; import org.apache.hive.service.cli.CLIService; @@ -98,7 +98,7 @@ public void 
run() { connector.setPort(portNum); // Linux:yes, Windows:no - connector.setReuseAddress(!Shell.WINDOWS); + connector.setReuseAddress(!Environments.WINDOWS); int maxIdleTime = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME); connector.setMaxIdleTime(maxIdleTime); diff --git shims/common/src/main/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java shims/common/src/main/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java index 228a972..c232fb4 100644 --- shims/common/src/main/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java +++ shims/common/src/main/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java @@ -22,6 +22,7 @@ import java.net.URI; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.shims.Environments; import org.apache.hadoop.util.Shell; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.shims.HadoopShims; @@ -52,7 +53,7 @@ public void initialize(URI name, Configuration conf) throws IOException { // from the supplied URI String scheme = name.getScheme(); String nameUriString = name.toString(); - if (Shell.WINDOWS) { + if (Environments.WINDOWS) { // Replace the encoded backward slash with forward slash // Remove the windows drive letter nameUriString = nameUriString.replaceAll("%5C", "/") diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/Environments.java shims/common/src/main/java/org/apache/hadoop/hive/shims/Environments.java new file mode 100644 index 0000000..5c6413d --- /dev/null +++ shims/common/src/main/java/org/apache/hadoop/hive/shims/Environments.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.shims;
+
+// Copied from org.apache.hadoop.util.Shell, which requires a Hadoop install
+// (HADOOP_HOME); the Hive JDBC client must not depend on Hadoop being present.
+public class Environments {
+
+  public enum OSType {
+    OS_TYPE_LINUX,
+    OS_TYPE_WIN,
+    OS_TYPE_SOLARIS,
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD,
+    OS_TYPE_OTHER
+  }
+
+  public static final OSType osType = getOSType();
+
+  private static OSType getOSType() {
+    String osName = System.getProperty("os.name");
+    if (osName.startsWith("Windows")) {
+      return OSType.OS_TYPE_WIN;
+    } else if (osName.contains("SunOS") || osName.contains("Solaris")) {
+      return OSType.OS_TYPE_SOLARIS;
+    } else if (osName.contains("Mac")) {
+      return OSType.OS_TYPE_MAC;
+    } else if (osName.contains("FreeBSD")) {
+      return OSType.OS_TYPE_FREEBSD;
+    } else if (osName.startsWith("Linux")) {
+      return OSType.OS_TYPE_LINUX;
+    } else {
+      // Some other form of Unix
+      return OSType.OS_TYPE_OTHER;
+    }
+  }
+
+  // Helper static vars for each platform
+  public static final boolean WINDOWS = (osType == OSType.OS_TYPE_WIN);
+  public static final boolean SOLARIS = (osType == OSType.OS_TYPE_SOLARIS);
+  public static final boolean MAC = (osType == OSType.OS_TYPE_MAC);
+  public static final boolean FREEBSD = (osType == OSType.OS_TYPE_FREEBSD);
+  public static final boolean LINUX = (osType == OSType.OS_TYPE_LINUX);
+  public static final boolean OTHER = (osType == OSType.OS_TYPE_OTHER);
+
+}