Index: eclipse-templates/.classpath
===================================================================
--- eclipse-templates/.classpath	(revision 798287)
+++ eclipse-templates/.classpath	(working copy)
@@ -44,6 +44,7 @@
+
Index: ant/build.xml
===================================================================
--- ant/build.xml	(revision 798287)
+++ ant/build.xml	(working copy)
@@ -27,6 +27,22 @@
+
+
+
+
+
+
+
+
+
+
Index: shims/ivy.xml
===================================================================
--- shims/ivy.xml	(revision 0)
+++ shims/ivy.xml	(revision 0)
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
Index: shims/src/0.20/java/org/apache/hadoop/hive/shims/JettyShims.java
===================================================================
--- shims/src/0.20/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
+++ shims/src/0.20/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+
+import org.mortbay.jetty.bio.SocketConnector;
+import org.mortbay.jetty.handler.RequestLogHandler;
+
+import java.io.IOException;
+
+public abstract class JettyShims {
+  public static class Server extends org.mortbay.jetty.Server {
+    public void setupListenerHostPort(String listen, int port)
+      throws IOException {
+
+      SocketConnector connector = new SocketConnector();
+      connector.setPort(port);
+      connector.setHost(listen);
+      this.addConnector(connector);
+    }
+
+    public void addContext(WebApplicationContext context) {
+      RequestLogHandler rlh = new RequestLogHandler();
+      rlh.setHandler(context);
+      this.addHandler(rlh);
+    }
+  }
+
+  public static class WebApplicationContext
+    extends org.mortbay.jetty.webapp.WebAppContext {
+
+    public void setWAR(String war) {
+      super.setWar(war);
+    }
+  }
+}
\ No newline at end of file
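The two nested classes above give Hive a single Jetty-facing type per concept, while Jetty 6's connector and handler wiring stays inside the shim. A minimal sketch of a caller, assuming a hypothetical WAR path; the HWIServer change later in this patch performs essentially the same sequence:

```java
import org.apache.hadoop.hive.shims.JettyShims;

public class JettyShimDemo {
  public static void main(String[] args) throws Exception {
    JettyShims.Server server = new JettyShims.Server();
    server.setupListenerHostPort("0.0.0.0", 9999);  // bind address and port

    JettyShims.WebApplicationContext ctx = new JettyShims.WebApplicationContext();
    ctx.setContextPath("/hwi");
    ctx.setWAR("/path/to/hive_hwi.war");  // hypothetical WAR location

    // Resolves to the shim's addContext on 0.20 (Jetty 6); on the Jetty 5
    // shims below, Server inherits addContext(HttpContext) directly.
    server.addContext(ctx);
    server.start();  // inherited from the underlying Jetty Server
  }
}
```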
Index: shims/src/0.20/java/org/apache/hadoop/hive/shims/HadoopShims.java
===================================================================
--- shims/src/0.20/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
+++ shims/src/0.20/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import java.io.IOException;
+
+/**
+ * Implementation of shims against Hadoop 0.20.0
+ */
+public class HadoopShims {
+  public static final boolean USES_JOBSHELL = false;
+
+  public static boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+    throws IOException {
+
+    return fs.deleteOnExit(path);
+  }
+
+  public static void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+    throws IOException {
+    // gone in 0.18+
+  }
+
+  /**
+   * Workaround for Hadoop 0.17-era JobClient, which only looks at its
+   * command-line Configuration.
+   */
+  public static void setTmpFiles(String prop, String files) {
+    // gone in 0.20+
+  }
+
+  public static class MiniDFSCluster extends org.apache.hadoop.hdfs.MiniDFSCluster {
+    public MiniDFSCluster(Configuration conf,
+                          int numDataNodes,
+                          boolean format,
+                          String[] racks) throws IOException {
+      super(conf, numDataNodes, format, racks);
+    }
+  }
+}
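Because every version of HadoopShims exposes the same static members, calling code compiles unchanged against whichever source tree the build selects. A hedged sketch of the calling convention; the FileSinkOperator change later in this patch relies on the same return-value contract:

```java
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.shims.HadoopShims;

public class DeleteOnExitDemo {
  public static void cleanupOnExit(FileSystem fs, Path tmp) throws IOException {
    // The 0.18+ shims register the path for deletion at JVM exit and return
    // true; the 0.17 shim returns false, so the caller must clean up by hand.
    boolean autoDelete = HadoopShims.fileSystemDeleteOnExit(fs, tmp);
    if (!autoDelete) {
      fs.delete(tmp);  // explicit fallback for pre-0.18 clusters
    }
  }
}
```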
Index: shims/src/0.17/java/org/apache/hadoop/hive/shims/JettyShims.java
===================================================================
--- shims/src/0.17/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
+++ shims/src/0.17/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.mortbay.http.SocketListener;
+import java.io.IOException;
+
+public abstract class JettyShims {
+  public static class Server extends org.mortbay.jetty.Server {
+    public void setupListenerHostPort(String listen, int port)
+      throws IOException {
+
+      SocketListener listener = new SocketListener();
+      listener.setPort(port);
+      listener.setHost(listen);
+      this.addListener(listener);
+    }
+  }
+
+  public static class WebApplicationContext
+    extends org.mortbay.jetty.servlet.WebApplicationContext {
+
+  }
+}
\ No newline at end of file
Index: shims/src/0.17/java/org/apache/hadoop/hive/shims/HadoopShims.java
===================================================================
--- shims/src/0.17/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
+++ shims/src/0.17/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import java.io.IOException;
+
+/**
+ * Implementation of shims against Hadoop 0.17.0
+ */
+public class HadoopShims {
+  public static final boolean USES_JOBSHELL = true;
+
+  public static boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+    throws IOException {
+    return false; // not implemented in 0.17
+  }
+
+  public static void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+    throws IOException {
+    fmt.validateInput(conf);
+  }
+
+  /**
+   * Workaround for Hadoop 0.17-era JobClient, which only looks at its
+   * command-line Configuration.
+   */
+  public static void setTmpFiles(String prop, String files) {
+    Configuration conf = JobClient.getCommandLineConfig();
+    if (conf != null) {
+      conf.set(prop, files);
+    }
+  }
+
+  public static class MiniDFSCluster extends org.apache.hadoop.dfs.MiniDFSCluster {
+    public MiniDFSCluster(Configuration conf,
+                          int numDataNodes,
+                          boolean format,
+                          String[] racks) throws IOException {
+      super(conf, numDataNodes, format, racks);
+    }
+  }
+}
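The setTmpFiles shim above captures a quirk of this Hadoop generation: JobClient consults a separate command-line Configuration, so setting a property on the JobConf alone is not enough. A sketch under that assumption — the property name "tmpfiles" is illustrative only; ExecDriver at the end of this patch passes the real one through initializeFiles:

```java
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.mapred.JobConf;

public class TmpFilesDemo {
  public static void addCacheFiles(JobConf job, String files) {
    job.set("tmpfiles", files);  // illustrative property name
    // Mirror the property into JobClient's command-line Configuration on
    // 0.17/0.18/0.19; the 0.20 shim makes this call a no-op.
    HadoopShims.setTmpFiles("tmpfiles", files);
  }
}
```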
Index: shims/src/0.18/java/org/apache/hadoop/hive/shims/JettyShims.java
===================================================================
--- shims/src/0.18/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
+++ shims/src/0.18/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.mortbay.http.SocketListener;
+import java.io.IOException;
+
+public abstract class JettyShims {
+  public static class Server extends org.mortbay.jetty.Server {
+    public void setupListenerHostPort(String listen, int port)
+      throws IOException {
+
+      SocketListener listener = new SocketListener();
+      listener.setPort(port);
+      listener.setHost(listen);
+      this.addListener(listener);
+    }
+  }
+
+  public static class WebApplicationContext
+    extends org.mortbay.jetty.servlet.WebApplicationContext {
+
+  }
+}
\ No newline at end of file
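One consequence of shimming the server type: Jetty 5's stop() throws InterruptedException while Jetty 6's throws Exception, so code written against JettyShims.Server must catch the widest signature. A small sketch; the HWIServer change later in this patch widens its own stop() for the same reason:

```java
import org.apache.hadoop.hive.shims.JettyShims;

public class ShutdownDemo {
  public static void stopQuietly(JettyShims.Server server) {
    try {
      // Throws InterruptedException under Jetty 5, Exception under Jetty 6.
      server.stop();
    } catch (Exception e) {
      System.err.println("error stopping embedded web server: " + e);
    }
  }
}
```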
Index: shims/src/0.18/java/org/apache/hadoop/hive/shims/HadoopShims.java
===================================================================
--- shims/src/0.18/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
+++ shims/src/0.18/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobClient;
+import java.io.IOException;
+
+/**
+ * Implementation of shims against Hadoop 0.18.0
+ */
+public class HadoopShims {
+  public static final boolean USES_JOBSHELL = true;
+
+  public static boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+    throws IOException {
+
+    return fs.deleteOnExit(path);
+  }
+
+  public static void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+    throws IOException {
+    // gone in 0.18+
+  }
+
+  /**
+   * Workaround for Hadoop 0.17-era JobClient, which only looks at its
+   * command-line Configuration.
+   */
+  public static void setTmpFiles(String prop, String files) {
+    Configuration conf = JobClient.getCommandLineConfig();
+    if (conf != null) {
+      conf.set(prop, files);
+    }
+  }
+
+  public static class MiniDFSCluster extends org.apache.hadoop.dfs.MiniDFSCluster {
+    public MiniDFSCluster(Configuration conf,
+                          int numDataNodes,
+                          boolean format,
+                          String[] racks) throws IOException {
+      super(conf, numDataNodes, format, racks);
+    }
+  }
+}
Index: shims/src/0.19/java/org/apache/hadoop/hive/shims/JettyShims.java
===================================================================
--- shims/src/0.19/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
+++ shims/src/0.19/java/org/apache/hadoop/hive/shims/JettyShims.java	(revision 0)
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.mortbay.http.SocketListener;
+import java.io.IOException;
+
+public abstract class JettyShims {
+  public static class Server extends org.mortbay.jetty.Server {
+    public void setupListenerHostPort(String listen, int port)
+      throws IOException {
+
+      SocketListener listener = new SocketListener();
+      listener.setPort(port);
+      listener.setHost(listen);
+      this.addListener(listener);
+    }
+  }
+
+  public static class WebApplicationContext
+    extends org.mortbay.jetty.servlet.WebApplicationContext {
+
+  }
+}
\ No newline at end of file
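The nested MiniDFSCluster classes exist because the real class moved from org.apache.hadoop.dfs to org.apache.hadoop.hdfs between 0.18 and 0.19; the shim gives test code a single name for it. A hedged sketch of a test harness — QTestUtil below is rewritten to do exactly this:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.shims.HadoopShims;

public class MiniClusterDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // 4 datanodes, format the image, default rack assignment
    HadoopShims.MiniDFSCluster dfs =
        new HadoopShims.MiniDFSCluster(conf, 4, true, null);
    try {
      FileSystem fs = dfs.getFileSystem();  // inherited from the real MiniDFSCluster
      System.out.println("mini DFS running at " + fs.getUri());
    } finally {
      dfs.shutdown();
    }
  }
}
```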
Index: shims/src/0.19/java/org/apache/hadoop/hive/shims/HadoopShims.java
===================================================================
--- shims/src/0.19/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
+++ shims/src/0.19/java/org/apache/hadoop/hive/shims/HadoopShims.java	(revision 0)
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import java.io.IOException;
+
+/**
+ * Implementation of shims against Hadoop 0.19.0
+ */
+public class HadoopShims {
+  public static final boolean USES_JOBSHELL = true;
+
+  public static boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+    throws IOException {
+
+    return fs.deleteOnExit(path);
+  }
+
+  public static void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+    throws IOException {
+    // gone in 0.18+
+  }
+
+  /**
+   * Workaround for Hadoop 0.17-era JobClient, which only looks at its
+   * command-line Configuration.
+   */
+  public static void setTmpFiles(String prop, String files) {
+    Configuration conf = JobClient.getCommandLineConfig();
+    if (conf != null) {
+      conf.set(prop, files);
+    }
+  }
+
+  public static class MiniDFSCluster extends org.apache.hadoop.hdfs.MiniDFSCluster {
+    public MiniDFSCluster(Configuration conf,
+                          int numDataNodes,
+                          boolean format,
+                          String[] racks) throws IOException {
+      super(conf, numDataNodes, format, racks);
+    }
+  }
+}
\ No newline at end of file
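With all four shim trees in place, USES_JOBSHELL is the one behavioral constant callers branch on: as the flag's name suggests, pre-0.20 `hadoop jar` routes through a JobShell-style launcher that accepts -libjars before the jar name, while 0.20 expects generic options after the main class. A sketch of the command assembly — the MapRedTask change below does this for the real ExecDriver invocation:

```java
import org.apache.hadoop.hive.shims.HadoopShims;

public class JarCommandDemo {
  public static String jarCommand(String libJarsOption, String hiveJar,
                                  String mainClass) {
    if (HadoopShims.USES_JOBSHELL) {
      // 0.17-0.19: the launcher parses -libjars ahead of the jar name
      return libJarsOption + hiveJar + " " + mainClass;
    }
    // 0.20: generic options follow the main class
    return hiveJar + " " + mainClass + " " + libJarsOption;
  }
}
```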
Index: shims/build.xml
===================================================================
--- shims/build.xml	(revision 0)
+++ shims/build.xml	(revision 0)
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: build-common.xml
===================================================================
--- build-common.xml	(revision 798287)
+++ build-common.xml	(working copy)
@@ -120,7 +120,7 @@
-
+
@@ -131,6 +131,8 @@
+
+
@@ -144,7 +146,9 @@
-
+
+
+
-
+
@@ -255,6 +259,10 @@
+
+
+
Index: hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
===================================================================
--- hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java	(revision 798287)
+++ hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java	(working copy)
@@ -6,9 +6,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.mortbay.http.SocketListener;
 import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.WebApplicationContext;
+
+import org.apache.hadoop.hive.shims.JettyShims;
+
 /**
  * This is the entry point for HWI. A web server is invoked in the same manner as the hive CLI.
  * Rather than opening a command line session a web server is started and a web application to work with
@@ -17,8 +18,7 @@
 public class HWIServer {
   protected static final Log l4j = LogFactory.getLog( HWIServer.class.getName() );
 
-  private org.mortbay.jetty.Server webServer;
-  private SocketListener listener;
+  private JettyShims.Server webServer;
   private String [] args;
 
  /**
@@ -36,8 +36,7 @@
   */
  public void start() throws IOException {
 
-    webServer = new org.mortbay.jetty.Server();
-    listener = new SocketListener();
+    webServer = new JettyShims.Server();
 
    HiveConf conf = new HiveConf(this.getClass());
    String listen = null;
@@ -54,13 +53,10 @@
      l4j.warn("hive.hwi.listen.port was not specified defaulting to 9999");
      port=9999;
    }
-
-    listener.setPort(port);
-    listener.setHost(listen);
-
-    webServer.addListener(listener);
-
-    WebApplicationContext wac = new WebApplicationContext();
+
+    webServer.setupListenerHostPort(listen, port);
+
+    JettyShims.WebApplicationContext wac = new JettyShims.WebApplicationContext();
    wac.setContextPath("/hwi");
 
    String hwiWAR = conf.getVar(HiveConf.ConfVars.HIVEHWIWARFILE);
@@ -117,9 +113,9 @@
 
  /**
   * Shut down the running HWI Server
-   * @throws InterruptedException Running Thread.stop() can and probably will throw this
+   * @throws Exception Running Thread.stop() can and probably will throw this
   */
-  public void stop() throws InterruptedException {
+  public void stop() throws Exception {
    l4j.info("HWI is shutting down");
    webServer.stop();
  }
Index: build.xml
===================================================================
--- build.xml	(revision 798287)
+++ build.xml	(working copy)
@@ -62,7 +62,7 @@
-
+
@@ -71,7 +71,7 @@
-
+
@@ -217,6 +217,11 @@
+
+
+
+
@@ -229,6 +234,7 @@
+
Index: contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java	(revision 798287)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java	(working copy)
@@ -20,9 +20,9 @@
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
-import java.lang.reflect.Method;
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -169,13 +169,7 @@
 
   // Cannot put @Override here because hadoop 0.18+ removed this method.
   public void validateInput(JobConf job) throws IOException {
-    try {
-      Method validateInput = format.getClass().getDeclaredMethod("validateInput", job.getClass());
-      validateInput.setAccessible(true);
-      validateInput.invoke(format, job);
-    } catch (Exception e) {
-      // Ignore this exception since validateInput is removed from hadoop in 0.18+.
-    }
+    HadoopShims.inputFormatValidateInput(format, job);
   }
 }
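The reflection block deleted above is the pattern this shim retires: InputFormat.validateInput() exists only through 0.17, so callers previously probed for it at runtime. Any wrapper format can now forward the check through a single call; a sketch:

```java
import java.io.IOException;

import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;

public class ValidateInputDemo {
  public static void validate(InputFormat wrapped, JobConf job)
      throws IOException {
    // Delegates to wrapped.validateInput(job) on 0.17; deliberately a
    // no-op on 0.18+, where the hook was removed from InputFormat.
    HadoopShims.inputFormatValidateInput(wrapped, job);
  }
}
```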
Index: data/conf/hive-log4j.properties
===================================================================
--- data/conf/hive-log4j.properties	(revision 798287)
+++ data/conf/hive-log4j.properties	(working copy)
@@ -58,4 +58,5 @@
 log4j.category.JPOX.Query=ERROR,DRFA
 log4j.category.JPOX.General=ERROR,DRFA
 log4j.category.JPOX.Enhancer=ERROR,DRFA
+log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java	(revision 798287)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java	(working copy)
@@ -34,8 +34,6 @@
 import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import java.lang.reflect.Method;
-import java.lang.reflect.Constructor;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -60,6 +58,7 @@
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
+import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -87,9 +86,8 @@
   private boolean overWrite;
   private CliDriver cliDriver;
   private MiniMRCluster mr = null;
-  private Object dfs = null;
+  private HadoopShims.MiniDFSCluster dfs = null;
   private boolean miniMr = false;
-  private Class dfsClass = null;
 
   public boolean deleteDirectory(File path) {
     if (path.exists()) {
@@ -175,30 +173,8 @@
     qMap = new TreeMap();
 
     if (miniMr) {
-      dfsClass = null;
-
-      // The path for MiniDFSCluster has changed, so look in both 17 and 19
-      // In hadoop 17, the path is org.apache.hadoop.dfs.MiniDFSCluster, whereas
-      // it is org.apache.hadoop.hdfs.MiniDFSCluster in hadoop 19. Due to this anamonly,
-      // use reflection to invoke the methods.
-      try {
-        dfsClass = Class.forName("org.apache.hadoop.dfs.MiniDFSCluster");
-      } catch (ClassNotFoundException e) {
-        dfsClass = null;
-      }
-
-      if (dfsClass == null) {
-        dfsClass = Class.forName("org.apache.hadoop.hdfs.MiniDFSCluster");
-      }
-
-      Constructor dfsCons =
-        dfsClass.getDeclaredConstructor(new Class[] {Configuration.class, Integer.TYPE,
-                                                     Boolean.TYPE, (new String[] {}).getClass()});
-
-      dfs = dfsCons.newInstance(conf, 4, true, null);
-      Method m = dfsClass.getDeclaredMethod("getFileSystem", new Class[]{});
-      FileSystem fs = (FileSystem)m.invoke(dfs, new Object[] {});
-
+      dfs = new HadoopShims.MiniDFSCluster(conf, 4, true, null);
+      FileSystem fs = dfs.getFileSystem();
       mr = new MiniMRCluster(4, fs.getUri().toString(), 1);
 
       // hive.metastore.warehouse.dir needs to be set relative to the jobtracker
@@ -226,10 +202,8 @@
     cleanUp();
 
     if (dfs != null) {
-      Method m = dfsClass.getDeclaredMethod("shutdown", new Class[]{});
-      m.invoke(dfs, new Object[]{});
+      dfs.shutdown();
       dfs = null;
-      dfsClass = null;
    }
 
    if (mr != null) {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java	(revision 798287)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java	(working copy)
@@ -20,7 +20,6 @@
 
 import java.io.IOException;
 import java.io.Serializable;
-import java.lang.reflect.Method;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
@@ -33,6 +32,7 @@
 import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.Serializer;
+import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
@@ -117,12 +117,7 @@
       outWriter = getRecordWriter(jc, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), outPath);
 
       // in recent hadoop versions, use deleteOnExit to clean tmp files.
-      try {
-        Method deleteOnExit = FileSystem.class.getDeclaredMethod("deleteOnExit", new Class [] {Path.class});
-        deleteOnExit.setAccessible(true);
-        deleteOnExit.invoke(fs, outPath);
-        autoDelete = true;
-      } catch (Exception e) {}
+      autoDelete = HadoopShims.fileSystemDeleteOnExit(fs, outPath);
 
       initializeChildren(hconf);
     } catch (HiveException e) {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java	(revision 798287)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java	(working copy)
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.HadoopShims;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -83,9 +84,16 @@
       String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ?
         "-silent" : "";
 
-      String cmdLine = hadoopExec + " jar " + libJarsOption + " " + hiveJar
-        + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan "
-        + planFile.toString() + " " + isSilent + " " + hiveConfArgs;
+
+      String jarCmd;
+      if(HadoopShims.USES_JOBSHELL) {
+        jarCmd = libJarsOption + hiveJar + " " + ExecDriver.class.getName();
+      } else {
+        jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
+      }
+
+      String cmdLine = hadoopExec + " jar " + jarCmd
+        + " -plan " + planFile.toString() + " " + isSilent + " " + hiveConfArgs;
 
       String files = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.FILE);
       if(!files.isEmpty()) {
@@ -102,7 +110,7 @@
         executor = Runtime.getRuntime().exec(cmdLine);
 
       // user specified the memory - only applicable for local mode
       else {
-        Map variables = System.getenv();
+        Map<String, String> variables = System.getenv();
         String[] env = new String[variables.size() + 1];
         int pos = 0;
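Restoring the type parameters on the environment map lets the local-mode launcher copy the parent environment into the String[] form Runtime.exec expects, without casts. A sketch of what that code path builds, with HADOOP_HEAPSIZE as an assumed example of the one extra slot:

```java
import java.util.Map;

public class ChildEnvDemo {
  public static String[] childEnv(int heapMb) {
    Map<String, String> variables = System.getenv();
    String[] env = new String[variables.size() + 1];
    int pos = 0;
    for (Map.Entry<String, String> entry : variables.entrySet()) {
      env[pos++] = entry.getKey() + "=" + entry.getValue();
    }
    env[pos] = "HADOOP_HEAPSIZE=" + heapMb;  // assumed extra variable for the child JVM
    return env;
  }
}
```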
"-silent" : ""; - String cmdLine = hadoopExec + " jar " + libJarsOption + " " + hiveJar - + " org.apache.hadoop.hive.ql.exec.ExecDriver -plan " - + planFile.toString() + " " + isSilent + " " + hiveConfArgs; + + String jarCmd; + if(HadoopShims.USES_JOBSHELL) { + jarCmd = libJarsOption + hiveJar + " " + ExecDriver.class.getName(); + } else { + jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption; + } + + String cmdLine = hadoopExec + " jar " + jarCmd + + " -plan " + planFile.toString() + " " + isSilent + " " + hiveConfArgs; String files = ExecDriver.getResourceFiles(conf, SessionState.ResourceType.FILE); if(!files.isEmpty()) { @@ -102,7 +110,7 @@ executor = Runtime.getRuntime().exec(cmdLine); // user specified the memory - only applicable for local mode else { - Map variables = System.getenv(); + Map variables = System.getenv(); String[] env = new String[variables.size() + 1]; int pos = 0; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (revision 798287) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (working copy) @@ -41,6 +41,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.ql.plan.mapredWork; import org.apache.hadoop.hive.ql.plan.exprNodeDesc; import org.apache.hadoop.hive.ql.plan.partitionDesc; @@ -91,12 +92,7 @@ private void initializeFiles(String prop, String files) { if (files != null && files.length() > 0) { job.set(prop, files); - - // workaround for hadoop-17 - jobclient only looks at commandlineconfig - Configuration commandConf = JobClient.getCommandLineConfig(); - if (commandConf != null) { - commandConf.set(prop, files); - } + HadoopShims.setTmpFiles(prop, files); } } Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (revision 798287) +++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (working copy) @@ -22,7 +22,6 @@ import java.io.DataOutput; import java.io.IOException; import java.io.Serializable; -import java.lang.reflect.Method; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Arrays; @@ -42,6 +41,7 @@ import org.apache.hadoop.hive.ql.plan.mapredWork; import org.apache.hadoop.hive.ql.plan.tableDesc; import org.apache.hadoop.hive.ql.plan.partitionDesc; +import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.FileInputFormat; @@ -291,13 +291,7 @@ FileInputFormat.setInputPaths(newjob, dir); newjob.setInputFormat(inputFormat.getClass()); - try { - Method validateInput = inputFormat.getClass().getDeclaredMethod("validateInput", newjob.getClass()); - validateInput.setAccessible(true); - validateInput.invoke(inputFormat, newjob); - } catch (Exception e) { - // Ignore this exception since validateInput is removed from hadoop in 0.18+. - } + HadoopShims.inputFormatValidateInput(inputFormat, newjob); } }