Index: hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java	(revision 1464987)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java	(working copy)
@@ -1,5 +1,3 @@
-
-
 /**
  *
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -38,8 +36,10 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.NavigableSet;
 import java.util.Random;
 import java.util.Set;
@@ -1633,6 +1633,27 @@
   }
 
   /**
+   * When running on Hadoop 2, we need to copy (or add) configuration values for keys
+   * that start with "yarn." (from the map reduce minicluster) to the
+   * configuration that will be used during the test (from the HBase minicluster).
+   * YARN configuration values are set properly in the map reduce minicluster,
+   * but not necessarily in the HBase mini cluster.
+   * @param srcConf the configuration to copy from (the map reduce minicluster version)
+   * @param destConf the configuration to copy to (the HBase minicluster version)
+   */
+  public static void copyConfigurationValues(Configuration srcConf, Configuration destConf) {
+    Iterator<Map.Entry<String, String>> it = srcConf.iterator();
+    while (it.hasNext()) {
+      Map.Entry<String, String> entry = it.next();
+      String key = entry.getKey();
+      String value = entry.getValue();
+      if (key.startsWith("yarn.") && !value.isEmpty()) {
+        destConf.set(key, value);
+      }
+    }
+  }
+
+  /**
    * Starts a MiniMRCluster. Call {@link #setFileSystemURI(String)} to use a different
    * filesystem.
    * @param servers The number of TaskTracker's to start.
@@ -1656,6 +1677,7 @@
     if (jobConf == null) {
       jobConf = mrCluster.createJobConf();
     }
+    copyConfigurationValues(jobConf, this.conf);
     jobConf.set("mapred.local.dir", conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
     LOG.info("Mini mapreduce cluster started");
 
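For illustration only (none of the lines below are part of the patch): the helper copies only non-empty yarn.* entries, so unrelated keys and empty values can never clobber anything already set in the destination. A self-contained sketch of those semantics, with made-up key names:

    Configuration src = new Configuration(false);    // no default resources loaded
    src.set("yarn.resourcemanager.address", "127.0.0.1:12345");
    src.set("yarn.empty.key", "");                   // skipped: empty value
    src.set("mapreduce.framework.name", "yarn");     // skipped: not a yarn.* key
    Configuration dest = new Configuration(false);
    HBaseTestingUtility.copyConfigurationValues(src, dest);
    assert "127.0.0.1:12345".equals(dest.get("yarn.resourcemanager.address"));
    assert dest.get("yarn.empty.key") == null;
    assert dest.get("mapreduce.framework.name") == null;

The method is made public static so that tests outside HBaseTestingUtility can reuse it; TestImportExport below drops its private copy accordingly.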
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java	(revision 1464987)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java	(working copy)
@@ -90,27 +90,6 @@
   }
 
   /**
-   * When running on Hadoop 2, we need to copy (or add) configuration values for keys
-   * that start with "yarn." (from the map reduce minicluster) to the
-   * configuration that will be used during the test (from the HBase minicluster).
-   * YARN configuration values are set properly in the map reduce minicluster,
-   * but not necessarily in the HBase mini cluster.
-   * @param srcConf the configuration to copy from (the map reduce minicluster version)
-   * @param destConf the configuration to copy to (the HBase minicluster version)
-   */
-  private void copyConfigurationValues(Configuration srcConf, Configuration destConf) {
-    Iterator<Map.Entry<String, String>> it = srcConf.iterator();
-    while (it.hasNext()) {
-      Map.Entry<String, String> entry = it.next();
-      String key = entry.getKey();
-      String value = entry.getValue();
-      if (key.startsWith("yarn.") && !value.isEmpty()) {
-        destConf.set(key, value);
-      }
-    }
-  }
-
-  /**
    * Test simple replication case with column mapping
    * @throws Exception
    */
@@ -139,7 +118,7 @@
     Configuration conf = opts.getConfiguration();
 
     // copy or add the necessary configuration values from the map reduce config to the hbase config
-    copyConfigurationValues(UTIL.getConfiguration(), conf);
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     Job job = Export.createSubmittableJob(conf, args);
@@ -160,7 +139,7 @@
     conf = opts.getConfiguration();
 
     // copy or add the necessary configuration values from the map reduce config to the hbase config
-    copyConfigurationValues(UTIL.getConfiguration(), conf);
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     job = Import.createSubmittableJob(conf, args);
@@ -192,7 +171,7 @@
     Configuration conf = opts.getConfiguration();
 
     // copy or add the necessary configuration values from the map reduce config to the hbase config
-    copyConfigurationValues(UTIL.getConfiguration(), conf);
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     Job job = Export.createSubmittableJob(conf, args);
@@ -228,8 +207,11 @@
       OUTPUT_DIR
     };
 
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
+        cluster.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
     assertEquals(conf.get(Export.EXPORT_BATCHING), EXPORT_BATCH_SIZE);
@@ -278,7 +260,7 @@
     Configuration conf = opts.getConfiguration();
 
     // copy or add the necessary configuration values from the map reduce config to the hbase config
-    copyConfigurationValues(UTIL.getConfiguration(), conf);
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     Job job = Export.createSubmittableJob(conf, args);
@@ -305,7 +287,7 @@
     conf = opts.getConfiguration();
 
     // copy or add the necessary configuration values from the map reduce config to the hbase config
-    copyConfigurationValues(UTIL.getConfiguration(), conf);
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     job = Import.createSubmittableJob(conf, args);
@@ -350,6 +332,8 @@
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
        cluster.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     Job job = Export.createSubmittableJob(conf, args);
@@ -369,6 +353,8 @@
     opts = new GenericOptionsParser(new
        Configuration(cluster.getConfiguration()), args);
     conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     job = Import.createSubmittableJob(conf, args);
@@ -392,6 +378,8 @@
 
     opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
     conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
 
     job = Import.createSubmittableJob(conf, args);
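Every hunk above follows the same recurring shape, now shared across the tests through the static helper. Assembled from the hunks for reference (GenericOptionsParser writes any parsed -D/-conf options into the Configuration it is handed, which is why each test gives it a fresh copy of the cluster configuration rather than the live object):

    GenericOptionsParser opts = new GenericOptionsParser(
        new Configuration(cluster.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    // merge the yarn.* values from the minicluster before building the job
    HBaseTestingUtility.copyConfigurationValues(UTIL.getConfiguration(), conf);
    args = opts.getRemainingArgs();
    Job job = Export.createSubmittableJob(conf, args);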
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java	(revision 1464987)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java	(working copy)
@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.hbase.security.access;
 
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.PrivilegedAction;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,19 +35,20 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
 import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;
 import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
 import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;
 import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest;
 import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
 import org.apache.hadoop.hbase.security.User;
@@ -54,12 +59,9 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
-import java.io.IOException;
-import java.math.BigInteger;
-import java.security.PrivilegedAction;
-import java.security.SecureRandom;
-import java.util.ArrayList;
-import java.util.List;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 
 /**
  * Coprocessor service for bulk loads in secure mode.
@@ -121,7 +123,7 @@
     baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
 
     try {
-      fs = FileSystem.get(conf);
+      fs = FileSystem.get(new HBaseConfiguration(conf));
       fs.mkdirs(baseStagingDir, PERM_HIDDEN);
       fs.setPermission(baseStagingDir, PERM_HIDDEN);
       //no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased
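The one functional change in this file (the rest is import reordering) is the FileSystem.get() call. The intent appears to be that the bulk-load staging filesystem should be resolved against a configuration with the HBase resource files (hbase-default.xml, hbase-site.xml) applied, rather than the bare coprocessor configuration. new HBaseConfiguration(conf) is the deprecated constructor spelling of that merge; a sketch of the equivalent with the non-deprecated factory, under that assumption:

    // HBaseConfiguration.create(conf) loads hbase-default.xml / hbase-site.xml
    // and then overlays the entries of conf, so the staging directory's
    // filesystem is resolved with HBase's settings present.
    fs = FileSystem.get(HBaseConfiguration.create(conf));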
Index: hbase-server/pom.xml
===================================================================
--- hbase-server/pom.xml	(revision 1464987)
+++ hbase-server/pom.xml	(working copy)
@@ -571,7 +571,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -593,8 +594,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
       </activation>
Index: pom.xml
===================================================================
--- pom.xml	(revision 1464987)
+++ pom.xml	(working copy)
@@ -741,7 +741,7 @@
     <buildDate>${maven.build.timestamp}</buildDate>
     <compileSource>1.6</compileSource>
-    <hadoop-two.version>2.0.2-alpha</hadoop-two.version>
+    <hadoop-two.version>2.0.4-SNAPSHOT</hadoop-two.version>
     <hadoop-one.version>1.1.2</hadoop-one.version>
     <commons-cli.version>1.2</commons-cli.version>
     <commons-codec.version>1.7</commons-codec.version>
@@ -1291,7 +1291,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -1349,8 +1350,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
       </activation>
Index: hbase-it/pom.xml
===================================================================
--- hbase-it/pom.xml	(revision 1464987)
+++ hbase-it/pom.xml	(working copy)
@@ -193,7 +193,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -216,8 +217,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
       </activation>
Index: hbase-examples/pom.xml
===================================================================
--- hbase-examples/pom.xml	(revision 1464987)
+++ hbase-examples/pom.xml	(working copy)
@@ -117,7 +117,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -139,8 +140,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
       </activation>
Index: hbase-client/pom.xml
===================================================================
--- hbase-client/pom.xml	(revision 1464987)
+++ hbase-client/pom.xml	(working copy)
@@ -127,7 +127,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -146,8 +147,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
       </activation>
Index: hbase-common/pom.xml
===================================================================
--- hbase-common/pom.xml	(revision 1464987)
+++ hbase-common/pom.xml	(working copy)
@@ -218,7 +218,8 @@
     <profile>
       <id>hadoop-1.0</id>
       <activation>
         <property>
-          <name>!hadoop.profile</name>
+          <name>hadoop.profile</name>
+          <value>1.0</value>
         </property>
       </activation>
@@ -237,8 +238,7 @@
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
-          <name>hadoop.profile</name>
-          <value>2.0</value>
+          <name>!hadoop.profile</name>
         </property>
      </activation>
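Taken together, the pom changes flip the default Hadoop profile in every module: hadoop-2.0 now activates when the hadoop.profile property is absent (the !hadoop.profile activation), while hadoop-1.0 must be requested explicitly with -Dhadoop.profile=1.0. Before this patch the defaults were reversed (hadoop-1.0 implicit, Hadoop 2 via -Dhadoop.profile=2.0). The root pom also moves the Hadoop 2 dependency line from 2.0.2-alpha to 2.0.4-SNAPSHOT. In practice: a plain mvn test now builds against Hadoop 2, and mvn test -Dhadoop.profile=1.0 builds against Hadoop 1.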