diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index 774e82a..d188f2a 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -2162,4 +2162,23 @@
+
+ hive.jar.directory
+ hdfs:///user/hive/
+
+ This is the location that Hive in Tez mode will look in to find a site-wide
+ installed Hive instance.
+
+
+
+
+ hive.user.install.directory
+ hdfs:///user/
+
+ If Hive (in Tez mode only) cannot find a usable Hive jar in "hive.jar.directory",
+ it will upload the Hive jar to <hive.user.install.directory>/<user name>
+ and use it to run queries.
+
+
+
diff --git hcatalog/webhcat/svr/src/test/data/status/hive/stderr hcatalog/webhcat/svr/src/test/data/status/hive/stderr
index d219690..a1750ad 100644
--- hcatalog/webhcat/svr/src/test/data/status/hive/stderr
+++ hcatalog/webhcat/svr/src/test/data/status/hive/stderr
@@ -19,7 +19,7 @@
WARNING: org.apache.hadoop.metrics.jvm.EventCounter is deprecated. Please use org.apache.hadoop.log.metrics.EventCounter in all the log4j.properties files.
Logging initialized using configuration in jar:file:/Users/daijy/hadoop-1.0.3/tmp/mapred/local/taskTracker/distcache/7168149899505899073_637041239_1133292873/localhost/apps/templeton/hive-0.10.0.tar.gz/hive-0.10.0/lib/hive-common-0.10.0.jar!/hive-log4j.properties
Hive history file=/tmp/daijy/hive_job_log_daijy_201305091500_862342848.txt
-Total MapReduce jobs = 1
+Total jobs = 1
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_201305091437_0012, Tracking URL = http://localhost:50030/jobdetails.jsp?jobid=job_201305091437_0012
diff --git ql/pom.xml ql/pom.xml
index f64614d..7087a4c 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -215,7 +215,7 @@
${mockito-all.version}
test
-
+
org.apache.tez
tez-api
${tez.version}
@@ -247,7 +247,7 @@
-
+
org.apache.tez
tez-runtime-library
${tez.version}
@@ -279,7 +279,7 @@
-
+
org.apache.tez
tez-mapreduce
${tez.version}
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 1f769b1..72c04d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1215,7 +1215,7 @@ public int execute() throws CommandNeedRetryException {
int jobs = Utilities.getMRTasks(plan.getRootTasks()).size()
+ Utilities.getTezTasks(plan.getRootTasks()).size();
if (jobs > 0) {
- console.printInfo("Total MapReduce jobs = " + jobs);
+ console.printInfo("Total jobs = " + jobs);
}
if (SessionState.get() != null) {
SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_NUM_TASKS,
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 480882a..f5c6355 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -316,7 +316,7 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
}
if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
- LOG.debug("Loading plan from: "+path.toUri().getPath());
+ LOG.debug("Loading plan from string: "+path.toUri().getPath());
String planString = conf.get(path.toUri().getPath());
if (planString == null) {
LOG.debug("Could not find plan string in conf");
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
index 7c7b7d9..2df8ab9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java
@@ -81,7 +81,7 @@ public void load(ExecMapperContext context,
while (kvReader.next()) {
MapJoinKey key = new MapJoinKey();
key.read(mapJoinTableSerdes[pos].getKeyContext(), (Writable)kvReader.getCurrentKey());
-
+
MapJoinRowContainer values = tableContainer.get(key);
if(values == null){
values = new MapJoinRowContainer();
diff --git shims/0.23/pom.xml shims/0.23/pom.xml
index eee2865..e327691 100644
--- shims/0.23/pom.xml
+++ shims/0.23/pom.xml
@@ -89,13 +89,13 @@
${jetty.version}
true
-
+
org.apache.tez
tez-api
${tez.version}
true
-
+
org.apache.tez
tez-dag
${tez.version}
@@ -107,13 +107,13 @@
${tez.version}
true
-
+
org.apache.tez
tez-runtime-library
${tez.version}
true
-
+
org.apache.tez
tez-mapreduce
${tez.version}
@@ -149,8 +149,8 @@
${hadoop-23.version}
true
-
- org.apache.tez
+
+ org.apache.tez
tez-tests
${tez.version}
true
diff --git shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index b570d1d..49cfee9 100644
--- shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -201,7 +201,7 @@ public MiniMrShim() {
conf = null;
}
- public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+ public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
String nameNode, int numDir) throws IOException {
this.conf = conf;
@@ -254,7 +254,7 @@ public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers
private final MiniTezCluster mr;
private final Configuration conf;
- public MiniTezShim(Configuration conf, int numberOfTaskTrackers,
+ public MiniTezShim(Configuration conf, int numberOfTaskTrackers,
String nameNode, int numDir) throws IOException {
mr = new MiniTezCluster("hive", numberOfTaskTrackers);
@@ -273,7 +273,7 @@ public int getJobTrackerPort() throws UnsupportedOperationException {
if (StringUtils.isBlank(address)) {
throw new IllegalArgumentException("Invalid YARN resource manager port.");
}
-
+
return Integer.parseInt(address);
}
@@ -281,7 +281,7 @@ public int getJobTrackerPort() throws UnsupportedOperationException {
public void shutdown() throws IOException {
mr.stop();
}
-
+
@Override
public void setupConfiguration(Configuration conf) {
Configuration config = mr.getConfig();