From 5985fc623416773e8c77f78c5c1b4f649958f65a Mon Sep 17 00:00:00 2001 From: wyj Date: Tue, 16 Jan 2018 19:38:52 -0800 Subject: [PATCH] CubeHFileJob should use the current HBase configuration instead of creating a new one --- .../apache/kylin/storage/hbase/steps/CubeHFileJob.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java index 093e8ee..ce4ceac 100644 --- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java +++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java @@ -24,7 +24,6 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -74,11 +73,12 @@ public class CubeHFileJob extends AbstractHadoopJob { CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()); CubeInstance cube = cubeMgr.getCube(cubeName); - job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME)); + + // use current hbase configuration + Configuration configuration = HBaseConnection.getCurrentHBaseConfiguration(); + job = Job.getInstance(configuration, getOptionValue(OPTION_JOB_NAME)); setJobClasspath(job, cube.getConfig()); - // For separate HBase cluster, note the output is a qualified HDFS path if "kylin.storage.hbase.cluster-fs" is configured, ref HBaseMRSteps.getHFilePath() - HBaseConnection.addHBaseClusterNNHAConfiguration(job.getConfiguration()); addInputDirs(getOptionValue(OPTION_INPUT_PATH), job); FileOutputFormat.setOutputPath(job, output); @@ -88,12 +88,11 @@ public class CubeHFileJob
extends AbstractHadoopJob { // add metadata to distributed cache attachCubeMetadata(cube, job.getConfiguration()); - Configuration hbaseConf = HBaseConfiguration.create(getConf()); - HTable htable = new HTable(hbaseConf, getOptionValue(OPTION_HTABLE_NAME)); + HTable htable = new HTable(configuration, getOptionValue(OPTION_HTABLE_NAME)); // Automatic config ! HFileOutputFormat3.configureIncrementalLoad(job, htable); - reconfigurePartitions(hbaseConf, partitionFilePath); + reconfigurePartitions(configuration, partitionFilePath); job.setInputFormatClass(SequenceFileInputFormat.class); job.setMapperClass(CubeHFileMapper.class); @@ -103,7 +102,7 @@ public class CubeHFileJob extends AbstractHadoopJob { job.setSortComparatorClass(RowKeyWritable.RowKeyComparator.class); // set block replication to 3 for hfiles - hbaseConf.set(DFSConfigKeys.DFS_REPLICATION_KEY, "3"); + configuration.set(DFSConfigKeys.DFS_REPLICATION_KEY, "3"); this.deletePath(job.getConfiguration(), output); -- 1.9.1