Index: ql/src/test/results/clientpositive/input33.q.out
===================================================================
--- ql/src/test/results/clientpositive/input33.q.out	(revision 1004871)
+++ ql/src/test/results/clientpositive/input33.q.out	(working copy)
@@ -13,7 +13,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
@@ -26,11 +26,11 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE TOK_RECORDWRITER '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE TOK_RECORDWRITER 'input20_script' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -82,7 +82,7 @@
                     type: string
               outputColumnNames: _col0, _col1
               Transform Operator
-                command: ../data/scripts/input20_script
+                command: input20_script
                 output info:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -125,7 +125,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -139,7 +139,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -149,11 +149,11 @@
 PREHOOK: query: SELECT * FROM dest1 SORT BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-29-43_192_703407910376694731/-mr-10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-13_584_9168589882209549331/-mr-10000
 POSTHOOK: query: SELECT * FROM dest1 SORT BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-29-43_192_703407910376694731/-mr-10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-13_584_9168589882209549331/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 1	105_105
Index: ql/src/test/results/clientpositive/groupby_bigdata.q.out
===================================================================
--- ql/src/test/results/clientpositive/groupby_bigdata.q.out	(revision 1004871)
+++ ql/src/test/results/clientpositive/groupby_bigdata.q.out	(working copy)
@@ -1,11 +1,11 @@
 PREHOOK: query: select count(distinct subq.key) from
-(FROM src MAP src.key USING 'python ../data/scripts/dumpdata_script.py' AS key WHERE src.key = 10) subq
+(FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/920614307/10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_17-16-17_108_6441093232864440768/-mr-10000
 POSTHOOK: query: select count(distinct subq.key) from
-(FROM src MAP src.key USING 'python ../data/scripts/dumpdata_script.py' AS key WHERE src.key = 10) subq
+(FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/920614307/10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_17-16-17_108_6441093232864440768/-mr-10000
 1000022
Index: ql/src/test/results/clientpositive/input20.q.out
===================================================================
--- ql/src/test/results/clientpositive/input20.q.out	(revision 1004871)
+++ ql/src/test/results/clientpositive/input20.q.out	(working copy)
@@ -13,7 +13,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
@@ -26,11 +26,11 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE TOK_RECORDWRITER '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST key value))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE TOK_RECORDWRITER 'input20_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST key value))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -82,7 +82,7 @@
                     type: string
               outputColumnNames: _col0, _col1
               Transform Operator
-                command: ../data/scripts/input20_script
+                command: input20_script
                 output info:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -125,7 +125,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -139,7 +139,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -149,11 +149,11 @@
 PREHOOK: query: SELECT * FROM dest1 SORT BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-28-07_786_8910183540779340449/-mr-10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-06_819_3128330973662451588/-mr-10000
 POSTHOOK: query: SELECT * FROM dest1 SORT BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-28-07_786_8910183540779340449/-mr-10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-06_819_3128330973662451588/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 1	105_105
Index: ql/src/test/results/clientpositive/rcfile_bigdata.q.out
===================================================================
--- ql/src/test/results/clientpositive/rcfile_bigdata.q.out	(revision 1004871)
+++ ql/src/test/results/clientpositive/rcfile_bigdata.q.out	(working copy)
@@ -13,12 +13,12 @@
 OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@columnTable_Bigdata
-PREHOOK: query: FROM (FROM src MAP src.key,src.value USING 'python ../data/scripts/dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
+PREHOOK: query: FROM (FROM src MAP src.key,src.value USING 'python dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
 INSERT OVERWRITE TABLE columnTable_Bigdata SELECT subq.key, subq.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@columntable_bigdata
-POSTHOOK: query: FROM (FROM src MAP src.key,src.value USING 'python ../data/scripts/dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
+POSTHOOK: query: FROM (FROM src MAP src.key,src.value USING 'python dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
 INSERT OVERWRITE TABLE columnTable_Bigdata SELECT subq.key, subq.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -38,11 +38,11 @@
 PREHOOK: query: select count(columnTable_Bigdata.key) from columnTable_Bigdata
 PREHOOK: type: QUERY
 PREHOOK: Input: default@columntable_bigdata
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_07-16-04_020_3830957675073157960/-mr-10000
+PREHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-25_075_8537788494860858123/-mr-10000
 POSTHOOK: query: select count(columnTable_Bigdata.key) from columnTable_Bigdata
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@columntable_bigdata
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_07-16-04_020_3830957675073157960/-mr-10000
+POSTHOOK: Output: file:/tmp/jssarma/hive_2010-10-04_16-33-25_075_8537788494860858123/-mr-10000
 POSTHOOK: Lineage: columntable_bigdata.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: columntable_bigdata.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 5005500
Index: ql/src/test/queries/clientpositive/input20.q
===================================================================
--- ql/src/test/queries/clientpositive/input20.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/input20.q	(working copy)
@@ -1,5 +1,7 @@
 CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
 
+ADD FILE ../data/scripts/input20_script;
+
 EXPLAIN
 FROM (
   FROM src
@@ -10,7 +12,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value;
 
 FROM (
@@ -22,7 +24,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS key, value;
 
 SELECT * FROM dest1 SORT BY key, value;
Index: ql/src/test/queries/clientpositive/input33.q
===================================================================
--- ql/src/test/queries/clientpositive/input33.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/input33.q	(working copy)
@@ -1,5 +1,7 @@
 CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
 
+ADD FILE ../data/scripts/input20_script;
+
 EXPLAIN
 FROM (
   FROM src
@@ -10,7 +12,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING);
 
 FROM (
@@ -22,7 +24,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1
 REDUCE tmap.key, tmap.value
-USING '../data/scripts/input20_script'
+USING 'input20_script'
 AS (key STRING, value STRING);
 
 SELECT * FROM dest1 SORT BY key, value;
Index: ql/src/test/queries/clientpositive/rcfile_bigdata.q
===================================================================
--- ql/src/test/queries/clientpositive/rcfile_bigdata.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/rcfile_bigdata.q	(working copy)
@@ -1,6 +1,7 @@
 set hive.map.aggr.hash.percentmemory = 0.3;
 set hive.mapred.local.mem = 256;
 
+add file ../data/scripts/dumpdata_script.py;
 CREATE table columnTable_Bigdata (key STRING, value STRING)
 ROW FORMAT SERDE
@@ -9,7 +10,7 @@
 INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
 OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
 
-FROM (FROM src MAP src.key,src.value USING 'python ../data/scripts/dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
+FROM (FROM src MAP src.key,src.value USING 'python dumpdata_script.py' AS (key,value) WHERE src.key = 10) subq
 INSERT OVERWRITE TABLE columnTable_Bigdata SELECT subq.key, subq.value;
 
 describe columnTable_Bigdata;
Index: ql/src/test/queries/clientpositive/loadpart_err.q
===================================================================
--- ql/src/test/queries/clientpositive/loadpart_err.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/loadpart_err.q	(working copy)
@@ -1,12 +1,14 @@
 set hive.cli.errors.ignore=true;
 
+ADD FILE ../data/scripts/error_script;
+
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19, 0.20)
 -- (this test is flaky so it is currently disabled for all Hadoop versions)
 
 CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING);
 
 INSERT OVERWRITE TABLE loadpart1 PARTITION (ds='2009-01-01')
-SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+SELECT TRANSFORM(src.key, src.value) USING 'error_script' AS (tkey, tvalue)
 FROM src;
 
 DESCRIBE loadpart1;
Index: ql/src/test/queries/clientpositive/groupby_bigdata.q
===================================================================
--- ql/src/test/queries/clientpositive/groupby_bigdata.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/groupby_bigdata.q	(working copy)
@@ -1,5 +1,7 @@
 set hive.map.aggr.hash.percentmemory = 0.3;
 set hive.mapred.local.mem = 384;
 
+add file ../data/scripts/dumpdata_script.py;
+
 select count(distinct subq.key) from
-(FROM src MAP src.key USING 'python ../data/scripts/dumpdata_script.py' AS key WHERE src.key = 10) subq;
+(FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq;
Index: ql/src/test/queries/clientpositive/scriptfile1.q
===================================================================
--- ql/src/test/queries/clientpositive/scriptfile1.q	(revision 1004871)
+++ ql/src/test/queries/clientpositive/scriptfile1.q	(working copy)
@@ -1,4 +1,4 @@
-CREATE TABLE scriptfile1_dest1(key INT, value STRING);
+CREATE TABLE dest1(key INT, value STRING);
 
 ADD FILE src/test/scripts/testgrep;
 
@@ -8,6 +8,6 @@
 USING 'testgrep' AS (tkey, tvalue)
 CLUSTER BY tkey
 ) tmap
-INSERT OVERWRITE TABLE scriptfile1_dest1 SELECT tmap.tkey, tmap.tvalue;
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
 
-SELECT scriptfile1_dest1.* FROM scriptfile1_dest1;
+SELECT dest1.* FROM dest1;
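The query-file changes above all move to the same pattern: each test script is registered as a session resource with ADD FILE and then invoked by bare name in the MAP/REDUCE/TRANSFORM ... USING clause. For a bare name to resolve in local mode, the child JVM that executes the plan has to be started in a directory that actually contains the added files; the MapRedTask change below arranges exactly that. A minimal standalone sketch of that execution model (not Hive code; the scratch directory and script name are hypothetical stand-ins):

import java.io.File;
import java.io.IOException;

// Minimal sketch, not part of the patch: run a script by bare name from a
// prepared working directory. "/tmp/hive-local-scratch" and "input20_script"
// are hypothetical stand-ins for the Hive scratch dir and an added script.
public class RunFromWorkDir {
  public static void main(String[] args) throws IOException, InterruptedException {
    File workDir = new File("/tmp/hive-local-scratch");
    // Resolve the program to an absolute path; handling of relative exec
    // paths is platform-dependent, so this keeps the sketch unambiguous.
    ProcessBuilder pb = new ProcessBuilder(new File(workDir, "input20_script").getAbsolutePath());
    pb.directory(workDir); // the child also runs where the symlinked resources live
    pb.inheritIO();
    int rc = pb.start().waitFor();
    if (rc != 0) {
      throw new IOException("Script exited with code " + rc);
    }
  }
}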
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java	(revision 1004871)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java	(working copy)
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.IOException;
+import java.io.File;
 import java.io.OutputStream;
 import java.io.Serializable;
 import java.util.HashMap;
@@ -28,6 +29,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -164,9 +166,23 @@
       String cmdLine = hadoopExec + " jar " + jarCmd + " -plan " + planPath.toString() + " " + isSilent + " " + hiveConfArgs;
 
+      String workDir = (new File(".")).getCanonicalPath();
       String files = getResourceFiles(conf, SessionState.ResourceType.FILE);
       if (!files.isEmpty()) {
         cmdLine = cmdLine + " -files " + files;
+
+        workDir = (new Path(ctx.getLocalTmpFileURI())).toUri().getPath();
+
+        if (! (new File(workDir)).mkdir())
+          throw new IOException ("Cannot create tmp working dir: " + workDir);
+
+        for (String f: StringUtils.split(files, ',')) {
+          Path p = new Path(f);
+          String target = p.toUri().getPath();
+          String link = workDir + Path.SEPARATOR + p.getName();
+          if (FileUtil.symLink(target, link) != 0)
+            throw new IOException ("Cannot link to added file: " + target + " from: " + link);
+        }
       }
 
       LOG.info("Executing: " + cmdLine);
@@ -207,7 +223,7 @@
         env[pos++] = name + "=" + value;
       }
 
       // Run ExecDriver in another JVM
-      executor = Runtime.getRuntime().exec(cmdLine, env);
+      executor = Runtime.getRuntime().exec(cmdLine, env, new File(workDir));
 
       StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
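The MapRedTask hunks above are the core of the fix: when files have been added via ADD FILE, the task creates a private working directory under the local scratch space, symlinks every added file into it under its basename, and launches the child ExecDriver JVM with that directory as its working directory, so scripts referenced by bare name resolve without any compile-time command rewriting. A rough equivalent of the symlinking loop, sketched with java.nio.file instead of Hadoop's FileUtil.symLink (the file list is a hypothetical example of what getResourceFiles returns):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Sketch only: mirrors the loop in the hunk above using java.nio.file.
public class SymlinkResources {
  public static void main(String[] args) throws IOException {
    // Hypothetical comma-separated resource list, as produced by getResourceFiles().
    String files = "/home/user/scripts/input20_script,/home/user/scripts/dumpdata_script.py";
    // Stand-in for the directory derived from ctx.getLocalTmpFileURI().
    Path workDir = Files.createTempDirectory("hive-exec");
    for (String f : files.split(",")) {
      Path target = Paths.get(f);
      // Link under the basename so a command like USING 'input20_script' resolves here.
      Files.createSymbolicLink(workDir.resolve(target.getFileName()), target);
    }
    System.out.println("Scratch dir with symlinks: " + workDir);
  }
}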
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java	(revision 1004871)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java	(working copy)
@@ -30,6 +30,7 @@
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Collections;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -84,6 +85,8 @@
 import org.apache.log4j.LogManager;
 import org.apache.log4j.PropertyConfigurator;
 import org.apache.log4j.varia.NullAppender;
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.Appender;
 
 /**
  * ExecDriver.
@@ -985,7 +988,14 @@
       printUsage();
     }
 
-    console.printInfo("plan = " + planFileName);
+    // print out the location of the log file for the user so
+    // that it's easy to find reason for local mode execution failures
+    for (Appender appender: Collections.list
+           ((Enumeration<Appender>)LogManager.getRootLogger().getAllAppenders())) {
+      if (appender instanceof FileAppender) {
+        console.printInfo("Execution log at: " + ((FileAppender)appender).getFile());
+      }
+    }
 
     // log the list of job conf parameters for reference
     LOG.info(sb.toString());
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 1004871)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -1373,37 +1373,6 @@
     return HiveConf.getColumnInternalName(pos);
   }
 
-  /**
-   * If the user script command needs any modifications - do it here.
-   */
-  private String getFixedCmd(String cmd) {
-    SessionState ss = SessionState.get();
-    if (ss == null) {
-      return cmd;
-    }
-
-    // for local mode - replace any references to packaged files by name with
-    // the reference to the original file path
-    if (ss.getConf().get("mapred.job.tracker", "local").equals("local")) {
-      Set<String> files = ss
-          .list_resource(SessionState.ResourceType.FILE, null);
-      if ((files != null) && !files.isEmpty()) {
-        String prog = getScriptProgName(cmd);
-        String args = getScriptArgs(cmd);
-
-        for (String oneFile : files) {
-          Path p = new Path(oneFile);
-          if (p.getName().equals(prog)) {
-            cmd = oneFile + args;
-            break;
-          }
-        }
-      }
-    }
-
-    return cmd;
-  }
-
   private String getScriptProgName(String cmd) {
     int end = cmd.indexOf(" ");
     return (end == -1) ? cmd : cmd.substring(0, end);
@@ -1667,7 +1636,7 @@
 
     Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
         new ScriptDesc(
-            getFixedCmd(fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText()))),
+            fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())),
            inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
         new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
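Two smaller changes round out the patch: ExecDriver walks the root logger's appenders and prints the path of any FileAppender, making local-mode failures easier to diagnose, and SemanticAnalyzer drops getFixedCmd(), since the working-directory scheme makes its compile-time rewriting of script names to full paths unnecessary. The appender walk, as a self-contained sketch against the log4j 1.x API in use at this revision:

import java.util.Collections;
import java.util.Enumeration;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.LogManager;

// Self-contained version of the appender walk in the ExecDriver hunk:
// report where any file-backed root appender writes its log.
public class PrintLogLocation {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    for (Appender appender : Collections.list(
        (Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
      if (appender instanceof FileAppender) {
        System.out.println("Execution log at: " + ((FileAppender) appender).getFile());
      }
    }
  }
}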