diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
index 81318beaa1..b3a187da63 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -64,7 +65,8 @@ HiveOutputFormat {
 
   public static final String HFILE_FAMILY_PATH = "hfile.family.path";
-
+  public static final String OUTPUT_TABLE_NAME_CONF_KEY =
+      "hbase.mapreduce.hfileoutputformat.table.name";
 
   static final Logger LOG = LoggerFactory.getLogger(HiveHFileOutputFormat.class.getName());
 
   private
@@ -95,6 +97,16 @@ public RecordWriter getHiveRecordWriter(
     Properties tableProperties,
     final Progressable progressable) throws IOException {
 
+    String hbaseTableName = jc.get(HBaseSerDe.HBASE_TABLE_NAME);
+    if (hbaseTableName == null) {
+      hbaseTableName = tableProperties.getProperty(hive_metastoreConstants.META_TABLE_NAME);
+      hbaseTableName = hbaseTableName.toLowerCase();
+      if (hbaseTableName.startsWith(HBaseStorageHandler.DEFAULT_PREFIX)) {
+        hbaseTableName = hbaseTableName.substring(HBaseStorageHandler.DEFAULT_PREFIX.length());
+      }
+    }
+    jc.set(OUTPUT_TABLE_NAME_CONF_KEY, hbaseTableName);
+
     // Read configuration for the target path, first from jobconf, then from table properties
     String hfilePath = getFamilyPath(jc, tableProperties);
     if (hfilePath == null) {
diff --git a/hbase-handler/src/test/queries/positive/hbase_bulk.q b/hbase-handler/src/test/queries/positive/hbase_bulk.q
index 5e0c14e08d..475aafc1ce 100644
--- a/hbase-handler/src/test/queries/positive/hbase_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_bulk.q
@@ -9,7 +9,7 @@ create table hbsort(key string, val string, val2 string)
 stored as
 INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
 OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
-TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf','hbase.mapreduce.hfileoutputformat.table.name'='hbsort');
+TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf');
 
 -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created
diff --git a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
index ac2fdfade6..85581ecdac 100644
--- a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
@@ -6,7 +6,7 @@ drop table if exists hb_target;
 create table hb_target(key int, val string)
 stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 with serdeproperties ('hbase.columns.mapping' = ':key,cf:val')
-tblproperties ('hbase.mapreduce.hfileoutputformat.table.name' = 'positive_hbase_handler_bulk');
+tblproperties ('hbase.table.name' = 'positive_hbase_handler_bulk');
 
 set hive.hbase.generatehfiles=true;
 set hfile.family.path=/tmp/hb_target/cf;
diff --git a/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out b/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out
index 10e1c0a1e9..1f42567a4b 100644
--- a/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out
@@ -5,14 +5,14 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table hb_target(key int, val string)
 stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 with serdeproperties ('hbase.columns.mapping' = ':key,cf:val')
-tblproperties ('hbase.mapreduce.hfileoutputformat.table.name' = 'positive_hbase_handler_bulk')
+tblproperties ('hbase.table.name' = 'positive_hbase_handler_bulk')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@hb_target
 POSTHOOK: query: create table hb_target(key int, val string)
 stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 with serdeproperties ('hbase.columns.mapping' = ':key,cf:val')
-tblproperties ('hbase.mapreduce.hfileoutputformat.table.name' = 'positive_hbase_handler_bulk')
+tblproperties ('hbase.table.name' = 'positive_hbase_handler_bulk')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@hb_target