diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index 861e438..79e676c 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -86,6 +86,7 @@ minitez.query.files.shared=alter_merge_2_orc.q,\
   mapreduce2.q,\
   merge1.q,\
   merge2.q,\
+  metadataonly1.q,\
   metadata_only_queries.q,\
   optimize_nullscan.q,\
   orc_analyze.q,\
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index d5324b2..70047a2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -111,6 +111,7 @@
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.exec.mr.ExecReducer;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.tez.DagUtils;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -3074,7 +3075,7 @@ public static double getHighestSamplePercentage (MapWork work) {
    * so we don't want to depend on scratch dir and context.
    */
   public static List<Path> getInputPathsTez(JobConf job, MapWork work) throws Exception {
-    String scratchDir = HiveConf.getVar(job, HiveConf.ConfVars.SCRATCHDIR);
+    String scratchDir = job.get(DagUtils.TEZ_TMP_DIR_KEY);
 
     // we usually don't want to create dummy files for tez, however the metadata only
     // optimization relies on it.
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index e116426..ebe9f92 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -124,6 +124,7 @@
  */
 public class DagUtils {
 
+  public static final String TEZ_TMP_DIR_KEY = "_hive_tez_tmp_dir";
   private static final Log LOG = LogFactory.getLog(DagUtils.class.getName());
   private static final String TEZ_DIR = "_tez_scratch_dir";
   private static DagUtils instance;
@@ -158,7 +159,7 @@ private void addCredentials(ReduceWork reduceWork, DAG dag) {
    * Creates the configuration object necessary to run a specific vertex from
    * map work. This includes input formats, input processor, etc.
    */
-  private JobConf initializeVertexConf(JobConf baseConf, MapWork mapWork) {
+  private JobConf initializeVertexConf(JobConf baseConf, Context context, MapWork mapWork) {
     JobConf conf = new JobConf(baseConf);
 
     if (mapWork.getNumMapTasks() != null) {
@@ -200,6 +201,7 @@ private JobConf initializeVertexConf(JobConf baseConf, MapWork mapWork) {
       inpFormat = CombineHiveInputFormat.class.getName();
     }
 
+    conf.set(TEZ_TMP_DIR_KEY, context.getMRTmpPath().toUri().toString());
     conf.set("mapred.mapper.class", ExecMapper.class.getName());
     conf.set("mapred.input.format.class", inpFormat);
 
@@ -524,7 +526,7 @@ private Vertex createVertex(JobConf conf, MapWork mapWork,
   /*
    * Helper function to create JobConf for specific ReduceWork.
    */
-  private JobConf initializeVertexConf(JobConf baseConf, ReduceWork reduceWork) {
+  private JobConf initializeVertexConf(JobConf baseConf, Context context, ReduceWork reduceWork) {
     JobConf conf = new JobConf(baseConf);
 
     conf.set("mapred.reducer.class", ExecReducer.class.getName());
@@ -896,14 +898,14 @@ public JobConf createConfiguration(HiveConf hiveConf) throws IOException {
    * @param work BaseWork will be used to populate the configuration object.
    * @return JobConf new configuration object
    */
-  public JobConf initializeVertexConf(JobConf conf, BaseWork work) {
+  public JobConf initializeVertexConf(JobConf conf, Context context, BaseWork work) {
     // simply dispatch the call to the right method for the actual (sub-) type of
     // BaseWork.
     if (work instanceof MapWork) {
-      return initializeVertexConf(conf, (MapWork)work);
+      return initializeVertexConf(conf, context, (MapWork)work);
     } else if (work instanceof ReduceWork) {
-      return initializeVertexConf(conf, (ReduceWork)work);
+      return initializeVertexConf(conf, context, (ReduceWork)work);
     } else {
       assert false;
       return null;
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
index 951e918..62de830 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
@@ -263,7 +263,7 @@ DAG build(JobConf conf, TezWork work, Path scratchDir,
         }
       } else {
         // Regular vertices
-        JobConf wxConf = utils.initializeVertexConf(conf, w);
+        JobConf wxConf = utils.initializeVertexConf(conf, ctx, w);
         Vertex wx = utils.createVertex(wxConf, w, scratchDir, appJarLr,
             additionalLr, fs, ctx, !isFinal, work);
         dag.addVertex(wx);
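
Review note: the heart of this patch is a handoff. DagUtils.initializeVertexConf now stamps the query's MR tmp directory (Context.getMRTmpPath()) into each vertex's JobConf under DagUtils.TEZ_TMP_DIR_KEY, and Utilities.getInputPathsTez reads that key back instead of resolving the global HiveConf.ConfVars.SCRATCHDIR. This matches the method's javadoc ("we don't want to depend on scratch dir and context"): the consumer needs only the JobConf it is handed. Below is a minimal, self-contained sketch of that pattern; the class name, helper names, and paths are illustrative, not part of the patch.

// Sketch of the producer/consumer handoff introduced by this patch.
// TmpDirHandoffSketch, initializeVertexConf's shape here, and the demo
// path are hypothetical; only TEZ_TMP_DIR_KEY and the JobConf calls
// mirror the patch itself.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class TmpDirHandoffSketch {
  // Mirrors DagUtils.TEZ_TMP_DIR_KEY from the patch.
  static final String TEZ_TMP_DIR_KEY = "_hive_tez_tmp_dir";

  // Producer side (cf. DagUtils.initializeVertexConf): record the
  // per-query tmp dir in the vertex configuration at DAG-build time.
  static JobConf initializeVertexConf(JobConf base, Path mrTmpPath) {
    JobConf conf = new JobConf(base);
    conf.set(TEZ_TMP_DIR_KEY, mrTmpPath.toUri().toString());
    return conf;
  }

  // Consumer side (cf. Utilities.getInputPathsTez): read the recorded
  // value back, with no HiveConf or Context dependency.
  static String scratchDirFor(JobConf job) {
    return job.get(TEZ_TMP_DIR_KEY);
  }

  public static void main(String[] args) {
    Path mrTmp = new Path("hdfs://nn:8020/tmp/hive/query1/-mr-10002"); // illustrative
    JobConf vertexConf = initializeVertexConf(new JobConf(), mrTmp);
    System.out.println(scratchDirFor(vertexConf)); // prints the recorded tmp dir
  }
}

A consequence worth noting for reviewers: because the tmp dir now travels inside the vertex JobConf, Context must be threaded through every initializeVertexConf overload, which is why TezTask.build passes ctx at the call site. The dummy files that the metadata-only optimization relies on (see the comment in getInputPathsTez) are thereby created under the query-scoped tmp path rather than directly under the global scratch dir.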