diff --git hbase-handler/src/test/queries/positive/hbase_bulk.m hbase-handler/src/test/queries/positive/hbase_bulk.m index 764d924..f8bb47d 100644 --- hbase-handler/src/test/queries/positive/hbase_bulk.m +++ hbase-handler/src/test/queries/positive/hbase_bulk.m @@ -20,7 +20,8 @@ stored as inputformat 'org.apache.hadoop.mapred.TextInputFormat' outputformat -'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'; +'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat' +location '/tmp/data/hbpartition'; -- this should produce one file, but we do not -- know what it will be called, so we will copy it to a well known @@ -30,13 +31,15 @@ select distinct value from src where value='val_100' or value='val_200'; -dfs -count /build/ql/test/data/warehouse/hbpartition; -dfs -cp /build/ql/test/data/warehouse/hbpartition/* /tmp/hbpartition.lst; +dfs -count /tmp/data/hbpartition; +dfs -cp /tmp/data/hbpartition/* /tmp/hbpartition.lst; set mapred.reduce.tasks=3; set hive.mapred.partitioner=org.apache.hadoop.mapred.lib.TotalOrderPartitioner; set total.order.partitioner.natural.order=false; set total.order.partitioner.path=/tmp/hbpartition.lst; +set mapreduce.totalorderpartitioner.naturalorder=false; +set mapreduce.totalorderpartitioner.path=/tmp/hbpartition.lst; -- this should produce three files in /tmp/hbsort/cf -- include some trailing blanks and nulls to make sure we handle them correctly diff --git hbase-handler/src/test/results/negative/cascade_dbdrop.q.out hbase-handler/src/test/results/negative/cascade_dbdrop.q.out index adc8a85..c29337d 100644 --- hbase-handler/src/test/results/negative/cascade_dbdrop.q.out +++ hbase-handler/src/test/results/negative/cascade_dbdrop.q.out @@ -37,7 +37,11 @@ Found 3 items #### A masked pattern was here #### PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:hbasedb +PREHOOK: Output: database:hbasedb POSTHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE 
POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:hbasedb +POSTHOOK: Output: database:hbasedb Command failed with exit code = 1 Query returned non-zero code: 1, cause: null diff --git hbase-handler/src/test/results/positive/hbase_bulk.m.out hbase-handler/src/test/results/positive/hbase_bulk.m.out index 806768a..e363214 100644 --- hbase-handler/src/test/results/positive/hbase_bulk.m.out +++ hbase-handler/src/test/results/positive/hbase_bulk.m.out @@ -33,6 +33,7 @@ inputformat 'org.apache.hadoop.mapred.TextInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat' +#### A masked pattern was here #### PREHOOK: type: CREATETABLE POSTHOOK: query: -- this is a dummy table used for controlling how the input file -- for TotalOrderPartitioner is created @@ -44,6 +45,7 @@ inputformat 'org.apache.hadoop.mapred.TextInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat' +#### A masked pattern was here #### POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@hbpartition PREHOOK: query: -- this should produce one file, but we do not diff --git shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java index cac1594..1e94759 100644 --- shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java +++ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java @@ -99,7 +99,13 @@ public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Ex @Override public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) { - return new TaskAttemptContextImpl(conf, new TaskAttemptID()) { + TaskAttemptID taskAttemptId = TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID)); + if (taskAttemptId == null) { + // If the caller is not within a mapper/reducer (if reading from the table via CliDriver), + // then 
TaskAttemptID.forName() may return NULL. Fall back to using default constructor. +    taskAttemptId = new TaskAttemptID(); +    } +    return new TaskAttemptContextImpl(conf, taskAttemptId) { @Override public void progress() { progressable.progress();