diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
index db94271..8b0e1f3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -362,8 +362,9 @@ public void initialize(Configuration hconf, ObjectInspector[] inputOIs)
       }
       childOperatorsTag[i] = parentOperators.indexOf(this);
       if (childOperatorsTag[i] == -1) {
-        throw new HiveException(
-            "Hive internal error: cannot find parent in the child operator!");
+        String errMsg = "Hive internal error: cannot find parent " + this
+            + " in " + childOperatorsArray[i];
+        throw new HiveException(errMsg);
       }
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index fe3dbcf..890d456 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -308,7 +308,8 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
     path = getPlanPath(conf, name);
     LOG.info("PLAN PATH = " + path);
     assert path != null;
-    if (!gWorkMap.containsKey(path)) {
+    if (!gWorkMap.containsKey(path) ||
+        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
       Path localPath;
       if (ShimLoader.getHadoopShims().isLocalMode(conf)) {
         localPath = path;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 2f5f60c..bcb9b0b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -151,6 +151,7 @@ public void configure(JobConf job) {
         // Don't create a new object if we are already out of memory
         throw (OutOfMemoryError) e;
       } else {
+        e.printStackTrace();
         throw new RuntimeException("Map operator initialization failed", e);
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
index 651165b..568c8d2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveMapFunction.java
@@ -51,11 +51,11 @@ public HiveMapFunction(byte[] buffer) {
   call(Iterator> it) throws Exception {
     if (jobConf == null) {
       jobConf = KryoSerializer.deserializeJobConf(this.buffer);
-      Path path = new Path(jobConf.getWorkingDirectory(), "plan.xml");
-      FSDataInputStream in = path.getFileSystem(jobConf).open(path);
-      MapWork mw = Utilities.deserializePlan(in, MapWork.class, jobConf);
-
-      Utilities.setMapWork(jobConf, mw);
+//      Path path = new Path(jobConf.getWorkingDirectory(), "plan.xml");
+//      FSDataInputStream in = path.getFileSystem(jobConf).open(path);
+//      MapWork mw = Utilities.deserializePlan(in, MapWork.class, jobConf);
+//
+//      Utilities.setMapWork(jobConf, mw);
       mapper = new ExecMapper();
       mapper.configure(jobConf);
       collector = new SparkCollector();
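
A note on the Utilities.java hunk: gWorkMap is a static, process-wide cache keyed by plan path. Spark executors are long-lived JVMs that serve tasks from many queries, so a cached entry can hand a new task a stale plan; the extra engine check forces a fresh read under Spark. A minimal sketch of the resulting control flow (not the actual Hive method; readPlan() is a hypothetical stand-in for the localPath/deserializePlan sequence that follows in the real code):

    // Sketch of the cache-bypass logic introduced by the hunk above.
    private static final Map<Path, BaseWork> gWorkMap =
        new HashMap<Path, BaseWork>();

    private static BaseWork getBaseWork(Configuration conf, String name) {
      Path path = getPlanPath(conf, name);
      boolean onSpark = "spark".equals(
          HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE));
      if (onSpark || !gWorkMap.containsKey(path)) {
        // readPlan() is hypothetical; it stands in for resolving the local
        // copy of the plan file and deserializing it, as the real method does.
        gWorkMap.put(path, readPlan(conf, path));
      }
      return gWorkMap.get(path);
    }

One trade-off worth noting: under Spark this turns every getBaseWork() call into a plan re-read, trading cache hits for correctness across queries sharing an executor.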
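On the HiveMapFunction.java hunk: the generic parameters of call() were stripped somewhere in transit, so the element type below is an assumption based on the usual Hive-on-Spark pair-record shape. With the manual plan.xml read commented out, ExecMapper.configure() is expected to pull the MapWork through Utilities.getMapWork(jobConf), which the Utilities.java hunk above now forces to re-read under Spark. A sketch of the per-partition flow, with the record loop and collector hand-off hedged as assumptions rather than taken from the patch:

    // Sketch only: the generics, the record loop, and the collector hand-off
    // are assumptions, not part of the diff above.
    public Iterable<Tuple2<BytesWritable, BytesWritable>> call(
        Iterator<Tuple2<BytesWritable, BytesWritable>> it) throws Exception {
      if (jobConf == null) {
        jobConf = KryoSerializer.deserializeJobConf(this.buffer);
        mapper = new ExecMapper();
        mapper.configure(jobConf);  // loads MapWork via Utilities.getMapWork(jobConf)
        collector = new SparkCollector();
      }
      while (it.hasNext()) {
        Tuple2<BytesWritable, BytesWritable> record = it.next();
        // ExecMapper implements org.apache.hadoop.mapred.Mapper, so records go
        // through the standard map(key, value, output, reporter) entry point;
        // this assumes SparkCollector implements OutputCollector.
        mapper.map(record._1(), record._2(), collector, Reporter.NULL);
      }
      // A real implementation would also close the mapper to flush operators.
      return collector;  // assumes SparkCollector exposes buffered pairs as an Iterable
    }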