diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index fe3dbcf..890d456 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -308,7 +308,8 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
     path = getPlanPath(conf, name);
     LOG.info("PLAN PATH = " + path);
     assert path != null;
-    if (!gWorkMap.containsKey(path)) {
+    if (!gWorkMap.containsKey(path) ||
+        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
       Path localPath;
       if (ShimLoader.getHadoopShims().isLocalMode(conf)) {
         localPath = path;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
index 651165b..3a00ef3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
@@ -51,11 +51,6 @@ public HiveMapFunction(byte[] buffer) {
       call(Iterator<Tuple2<BytesWritable, BytesWritable>> it) throws Exception {
     if (jobConf == null) {
       jobConf = KryoSerializer.deserializeJobConf(this.buffer);
-      Path path = new Path(jobConf.getWorkingDirectory(), "plan.xml");
-      FSDataInputStream in = path.getFileSystem(jobConf).open(path);
-      MapWork mw = Utilities.deserializePlan(in, MapWork.class, jobConf);
-
-      Utilities.setMapWork(jobConf, mw);
       mapper = new ExecMapper();
       mapper.configure(jobConf);
       collector = new SparkCollector();
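
For reference, the new guard in getBaseWork is equivalent to the sketch below. This is not part of the patch; onSpark is a hypothetical local introduced only for readability. The effect is that under the Spark execution engine the gWorkMap cache is bypassed, so the plan is re-deserialized from the plan path on every lookup.

    // Sketch only, not part of the patch: the modified condition rewritten
    // with a hypothetical local. Under Spark, the cached entry is ignored
    // and the serialized plan is re-read each time.
    boolean onSpark = "spark".equals(
        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
    if (!gWorkMap.containsKey(path) || onSpark) {
      // fall through to deserialize the plan from localPath, as before
    }

Putting the string literal first in the equals comparison also avoids a potential NPE if the engine variable were ever unset; the patch as posted calls .equals on the value returned by getVar.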
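With the manual plan.xml read dropped from HiveMapFunction.call, the map-side plan is presumably picked up inside ExecMapper.configure(jobConf), which reaches getBaseWork via Utilities.getMapWork and therefore goes through the relaxed cache check above. A condensed view of the assumed call chain (comments only; nothing here is new code beyond the patch):

    // Assumed flow after this patch (sketch, not part of the diff):
    // HiveMapFunction.call
    //   -> ExecMapper.configure(jobConf)
    //        -> Utilities.getMapWork(jobConf)
    //             -> Utilities.getBaseWork(jobConf, plan name)
    //                  // with hive.execution.engine=spark, the gWorkMap
    //                  // cache is skipped and the plan is deserialized
    //                  // from the plan path, replacing the removed
    //                  // manual plan.xml read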