hadoop.profile=2.0
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (revision 1464716)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (working copy)
@@ -1,5 +1,3 @@
-
-
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
@@ -38,8 +36,10 @@
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.Random;
import java.util.Set;
@@ -1656,6 +1656,7 @@
if (jobConf == null) {
jobConf = mrCluster.createJobConf();
}
+ HBaseConfiguration.merge(this.conf, jobConf);
jobConf.set("mapred.local.dir",
conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
LOG.info("Mini mapreduce cluster started");
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java (revision 1464716)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java (working copy)
@@ -228,8 +228,11 @@
OUTPUT_DIR
};
- GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+ GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
+ cluster.getConfiguration()), args);
Configuration conf = opts.getConfiguration();
+    // Copy or add the necessary configuration values from the MapReduce config to the HBase config.
+ copyConfigurationValues(UTIL.getConfiguration(), conf);
args = opts.getRemainingArgs();
assertEquals(conf.get(Export.EXPORT_BATCHING), EXPORT_BATCH_SIZE);
@@ -350,6 +353,8 @@
GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
cluster.getConfiguration()), args);
Configuration conf = opts.getConfiguration();
+    // Copy or add the necessary configuration values from the MapReduce config to the HBase config.
+ copyConfigurationValues(UTIL.getConfiguration(), conf);
args = opts.getRemainingArgs();
Job job = Export.createSubmittableJob(conf, args);
@@ -369,6 +374,8 @@
opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
conf = opts.getConfiguration();
+    // Copy or add the necessary configuration values from the MapReduce config to the HBase config.
+ copyConfigurationValues(UTIL.getConfiguration(), conf);
args = opts.getRemainingArgs();
job = Import.createSubmittableJob(conf, args);
@@ -392,6 +399,8 @@
opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
conf = opts.getConfiguration();
+    // Copy or add the necessary configuration values from the MapReduce config to the HBase config.
+ copyConfigurationValues(UTIL.getConfiguration(), conf);
args = opts.getRemainingArgs();
job = Import.createSubmittableJob(conf, args);
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java (revision 1464716)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java (working copy)
@@ -18,9 +18,13 @@
package org.apache.hadoop.hbase.security.access;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.PrivilegedAction;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,19 +35,20 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
import org.apache.hadoop.hbase.ipc.RequestContext;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.security.User;
@@ -54,12 +59,9 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
-import java.io.IOException;
-import java.math.BigInteger;
-import java.security.PrivilegedAction;
-import java.security.SecureRandom;
-import java.util.ArrayList;
-import java.util.List;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
/**
* Coprocessor service for bulk loads in secure mode.
@@ -121,7 +123,7 @@
baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
try {
- fs = FileSystem.get(conf);
+ fs = FileSystem.get(new HBaseConfiguration(conf));
fs.mkdirs(baseStagingDir, PERM_HIDDEN);
fs.setPermission(baseStagingDir, PERM_HIDDEN);
//no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased
Index: hbase-server/pom.xml
===================================================================
--- hbase-server/pom.xml (revision 1464716)
+++ hbase-server/pom.xml (working copy)
@@ -571,7 +571,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -593,8 +594,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile
Index: pom.xml
===================================================================
--- pom.xml (revision 1464716)
+++ pom.xml (working copy)
@@ -741,7 +741,7 @@
${maven.build.timestamp}
1.6
- 2.0.2-alpha
+ 2.0.4-SNAPSHOT
1.1.2
1.2
1.7
@@ -1291,7 +1291,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -1349,8 +1350,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile
Index: hbase-it/pom.xml
===================================================================
--- hbase-it/pom.xml (revision 1464716)
+++ hbase-it/pom.xml (working copy)
@@ -193,7 +193,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -216,8 +217,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile
Index: hbase-examples/pom.xml
===================================================================
--- hbase-examples/pom.xml (revision 1464716)
+++ hbase-examples/pom.xml (working copy)
@@ -117,7 +117,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -139,8 +140,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile
Index: hbase-client/pom.xml
===================================================================
--- hbase-client/pom.xml (revision 1464716)
+++ hbase-client/pom.xml (working copy)
@@ -127,7 +127,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -146,8 +147,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile
Index: hbase-common/pom.xml
===================================================================
--- hbase-common/pom.xml (revision 1464716)
+++ hbase-common/pom.xml (working copy)
@@ -218,7 +218,8 @@
hadoop-1.0
- !hadoop.profile
+ hadoop.profile
+ 1.0
@@ -237,8 +238,7 @@
hadoop-2.0
- hadoop.profile
- 2.0
+ !hadoop.profile