Index: src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
===================================================================
--- src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java (working copy)
@@ -348,7 +348,7 @@
     Path oldFile = cleanupCurrentWriter(this.filenum);
     this.filenum = System.currentTimeMillis();
     Path newPath = computeFilename(this.filenum);
-    this.writer = createWriter(fs, newPath, new HBaseConfiguration(conf));
+    this.writer = createWriter(fs, newPath, HBaseConfiguration.create(conf));
     LOG.info((oldFile != null? "Roll " + FSUtils.getPath(oldFile) + ", entries=" + this.numEntries.get() +
Index: src/java/org/apache/hadoop/hbase/HBaseConfiguration.java
===================================================================
--- src/java/org/apache/hadoop/hbase/HBaseConfiguration.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/HBaseConfiguration.java (working copy)
@@ -47,7 +47,7 @@
   }
 
   /**
-   * Instantinating HBaseConfiguration() is deprecated. Please use
+   * Instantiating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create(conf) to construct a plain Configuration
    */
   @Deprecated
Index: src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java
===================================================================
--- src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/mapreduce/BuildTableIndex.java (working copy)
@@ -29,6 +29,7 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -123,8 +124,9 @@
       iconf.addFromXML(content);
       conf.set("hbase.index.conf", content);
     }
-
-    Job job = new Job(conf, "build index for table " + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName("build index for table " + tableName);
     // number of indexes to partition into
     job.setNumReduceTasks(numReduceTasks);
     Scan scan = new Scan();
Index: src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
===================================================================
--- src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java (working copy)
@@ -29,6 +29,7 @@
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -85,7 +86,9 @@
   public static Job createSubmittableJob(Configuration conf, String[] args)
   throws IOException {
     String tableName = args[0];
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(RowCounter.class);
     // Columns are space delimited
     StringBuilder sb = new StringBuilder();
Index: src/java/org/apache/hadoop/hbase/mapreduce/Import.java
===================================================================
--- src/java/org/apache/hadoop/hbase/mapreduce/Import.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/mapreduce/Import.java (working copy)
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -85,7 +86,10 @@
   throws IOException {
     String tableName = args[0];
     Path inputDir = new Path(args[1]);
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
+    job.setJarByClass(Importer.class);
     FileInputFormat.setInputPaths(job, inputDir);
     job.setInputFormatClass(SequenceFileInputFormat.class);
Index: src/java/org/apache/hadoop/hbase/mapreduce/Export.java
===================================================================
--- src/java/org/apache/hadoop/hbase/mapreduce/Export.java (revision 894950)
+++ src/java/org/apache/hadoop/hbase/mapreduce/Export.java (working copy)
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -78,7 +79,9 @@
   throws IOException {
     String tableName = args[0];
     Path outputDir = new Path(args[1]);
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(Exporter.class);
     // TODO: Allow passing filter and subset of rows/columns.
     Scan s = new Scan();
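
Note: the recurring change in the mapreduce classes above replaces the deprecated
Job(Configuration, String) constructor with a Cluster handle plus the
Job.getInstance factory. Below is a minimal sketch of that pattern, assuming the
Hadoop 0.21-era org.apache.hadoop.mapreduce API where Cluster and
Job.getInstance(Cluster, Configuration) exist; the class name JobFactoryExample,
the method createJob, and the "example_" prefix are illustrative, not part of
the patch.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;

public class JobFactoryExample {
  public static Job createJob(Configuration conf, String tableName)
      throws IOException {
    // Deprecated form the patch removes:
    //   Job job = new Job(conf, "example_" + tableName);
    Cluster mrCluster = new Cluster(conf);      // handle to the MR cluster described by conf
    Job job = Job.getInstance(mrCluster, conf); // factory method replaces the constructor
    job.setJobName("example_" + tableName);     // job name is now set separately
    return job;
  }
}

One consequence of the new pattern: the cluster connection is made explicitly at
job-construction time rather than implicitly inside the Job constructor, which is
why every call site gains the extra mrCluster line instead of a one-line swap.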
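
The HLog.java hunk follows the deprecation path documented in the
HBaseConfiguration.java hunk: new HBaseConfiguration(conf) gives way to the
static factory HBaseConfiguration.create(conf), which returns a plain
Configuration with the HBase resources loaded and the passed-in settings merged
on top. A minimal sketch of the call; the wrapper class ConfFactoryExample is
illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ConfFactoryExample {
  public static Configuration hbaseConf(Configuration conf) {
    // Deprecated form the patch removes:
    //   Configuration c = new HBaseConfiguration(conf);
    // The factory returns a plain Configuration, per the javadoc fixed above,
    // rather than an instance of the HBaseConfiguration subclass.
    return HBaseConfiguration.create(conf);
  }
}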