diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
index 30959a0..3dca0a3 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecureBulkLoadUtil.java
@@ -37,6 +37,6 @@ public class SecureBulkLoadUtil {
}
public static Path getBaseStagingDir(Configuration conf) {
- return new Path(conf.get(BULKLOAD_STAGING_DIR, "/tmp/hbase-staging"));
+ return new Path(conf.get(BULKLOAD_STAGING_DIR));
}
}
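
With the hard-coded "/tmp/hbase-staging" fallback removed, getBaseStagingDir relies entirely on configuration: if hbase.bulkload.staging.dir were missing, conf.get() would return null and new Path(null) throws IllegalArgumentException, so the default added to hbase-default.xml below is what keeps this working out of the box. A minimal sketch of the resolution, assuming hbase-default.xml is on the classpath (the class name is illustrative, not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class StagingDirResolutionSketch {
  public static void main(String[] args) {
    // HBaseConfiguration.create() loads hbase-default.xml and hbase-site.xml.
    Configuration conf = HBaseConfiguration.create();

    // ${hbase.fs.tmp.dir} and ${user.name} are expanded when the value is read,
    // e.g. /user/alice/hbase-staging for user "alice".
    String staging = conf.get("hbase.bulkload.staging.dir");
    System.out.println(new Path(staging));
  }
}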
diff --git hbase-common/src/main/resources/hbase-default.xml hbase-common/src/main/resources/hbase-default.xml
index f107fb7..a5b12d3 100644
--- hbase-common/src/main/resources/hbase-default.xml
+++ hbase-common/src/main/resources/hbase-default.xml
@@ -63,6 +63,20 @@ possible configurations would overwhelm and obscure the important.
     machine restart.</description>
   </property>
+  <property>
+    <name>hbase.fs.tmp.dir</name>
+    <value>/user/${user.name}/hbase-staging</value>
+    <description>A staging directory in default file system (HDFS)
+    for keeping temporary data.
+    </description>
+  </property>
+  <property>
+    <name>hbase.bulkload.staging.dir</name>
+    <value>${hbase.fs.tmp.dir}</value>
+    <description>A staging directory in default file system (HDFS)
+    for bulk loading.
+    </description>
+  </property>
   <property>
     <name>hbase.cluster.distributed</name>
     <value>false</value>
     <description>The mode the cluster will be in. Possible values are
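
Both new values lean on Hadoop's ${...} substitution: ${user.name} is taken from the JVM system property, and hbase.bulkload.staging.dir defaults to the literal string ${hbase.fs.tmp.dir}, expanded when the property is read. One consequence, sketched below with illustrative values, is that overriding hbase.fs.tmp.dir alone also moves the bulk load staging directory:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class StagingOverrideSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();

    // Override only the generic HBase tmp dir; the bulk load staging dir
    // follows because its stored default is "${hbase.fs.tmp.dir}".
    conf.set("hbase.fs.tmp.dir", "/user/alice/staging");
    System.out.println(conf.get("hbase.bulkload.staging.dir")); // /user/alice/staging
  }
}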
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index c96a2bf..4e94308 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -588,7 +588,7 @@ public class HFileOutputFormat2
Configuration conf = job.getConfiguration();
// create the partitions file
FileSystem fs = FileSystem.get(conf);
- Path partitionsPath = new Path(conf.get("hadoop.tmp.dir"), "partitions_" + UUID.randomUUID());
+ Path partitionsPath = new Path(conf.get("hbase.fs.tmp.dir"), "partitions_" + UUID.randomUUID());
fs.makeQualified(partitionsPath);
writePartitions(conf, partitionsPath, splitPoints);
fs.deleteOnExit(partitionsPath);
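
hadoop.tmp.dir is a general Hadoop setting that in many deployments does not correspond to a usable directory on the job's default filesystem, so the partitions file written here could land somewhere the partitioner tasks cannot read. Writing it under hbase.fs.tmp.dir keeps it on the default (HDFS) filesystem. A rough sketch of how the path resolves once the property is set (paths are illustrative):

import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PartitionsPathSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("hbase.fs.tmp.dir", "/user/alice/hbase-staging"); // normally from hbase-default.xml

    // makeQualified resolves the scheme-less path against fs.defaultFS,
    // so every task of the job sees the same fully qualified file.
    FileSystem fs = FileSystem.get(conf);
    Path partitionsPath = new Path(conf.get("hbase.fs.tmp.dir"),
        "partitions_" + UUID.randomUUID());
    System.out.println(fs.makeQualified(partitionsPath));
  }
}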
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index f4ccc4e..314b7b2 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -335,7 +335,9 @@ public class TestHFileOutputFormat {
@Test
public void testJobConfiguration() throws Exception {
- Job job = new Job(util.getConfiguration());
+ Configuration conf = new Configuration(this.util.getConfiguration());
+ conf.set("hbase.fs.tmp.dir", util.getDataTestDir("testJobConfiguration").toString());
+ Job job = new Job(conf);
job.setWorkingDirectory(util.getDataTestDir("testJobConfiguration"));
HTableDescriptor tableDescriptor = Mockito.mock(HTableDescriptor.class);
RegionLocator regionLocator = Mockito.mock(RegionLocator.class);
@@ -820,6 +822,7 @@ public class TestHFileOutputFormat {
// We turn off the sequence file compression, because DefaultCodec
// pollutes the GZip codec pool with an incompatible compressor.
conf.set("io.seqfile.compression.type", "NONE");
+ conf.set("hbase.fs.tmp.dir", dir.toString());
Job job = new Job(conf, "testLocalMRIncrementalLoad");
job.setWorkingDirectory(util.getDataTestDirOnTestFS("testColumnFamilySettings"));
setupRandomGeneratorMapper(job);
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 178eb57..7aa5dc4 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -337,7 +337,9 @@ public class TestHFileOutputFormat2 {
@Test
public void testJobConfiguration() throws Exception {
- Job job = new Job(util.getConfiguration());
+ Configuration conf = new Configuration(this.util.getConfiguration());
+ conf.set("hbase.fs.tmp.dir", util.getDataTestDir("testJobConfiguration").toString());
+ Job job = new Job(conf);
job.setWorkingDirectory(util.getDataTestDir("testJobConfiguration"));
Table table = Mockito.mock(Table.class);
RegionLocator regionLocator = Mockito.mock(RegionLocator.class);
@@ -823,6 +825,7 @@ public class TestHFileOutputFormat2 {
// We turn off the sequence file compression, because DefaultCodec
// pollutes the GZip codec pool with an incompatible compressor.
conf.set("io.seqfile.compression.type", "NONE");
+ conf.set("hbase.fs.tmp.dir", dir.toString());
Job job = new Job(conf, "testLocalMRIncrementalLoad");
job.setWorkingDirectory(util.getDataTestDirOnTestFS("testColumnFamilySettings"));
setupRandomGeneratorMapper(job);
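
Both test classes apply the same fix-up: with the partitions file now keyed off hbase.fs.tmp.dir, each test pins that property to its own data directory so temporary files stay under the test area instead of /user/<user>/hbase-staging. A stripped-down sketch of the pattern outside the HBase test harness (directory and job name are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class TestTmpDirPatternSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Keep job-scoped temp files (such as the partitions file) under a
    // disposable, test-owned directory.
    conf.set("hbase.fs.tmp.dir", "/tmp/hbase-test-" + System.nanoTime());

    Job job = Job.getInstance(conf, "testLocalMRIncrementalLoad");
    System.out.println(job.getConfiguration().get("hbase.fs.tmp.dir"));
  }
}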