diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java index ab30003..5bedadd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java @@ -264,5 +264,9 @@ /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */ public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT = 60; + + // HTTP policies to be used in configuration + public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY"; + public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java index d9e219a..6e0194c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java @@ -28,25 +28,41 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class HttpConfig { - private static boolean sslEnabled; + private static Policy policy; + public enum Policy { + HTTP_ONLY, + HTTPS_ONLY; + + public static Policy fromString(String value) { + if (value.equalsIgnoreCase(CommonConfigurationKeysPublic + .HTTP_POLICY_HTTPS_ONLY)) { + return HTTPS_ONLY; + } + return HTTP_ONLY; + } + } static { Configuration conf = new Configuration(); - sslEnabled = conf.getBoolean( - CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY, - CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT); + boolean sslEnabled = conf.getBoolean( + CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY, + 
CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT); + policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY; } - public static void setSecure(boolean secure) { - sslEnabled = secure; + public static void setPolicy(Policy policy) { + HttpConfig.policy = policy; } public static boolean isSecure() { - return sslEnabled; + return policy == Policy.HTTPS_ONLY; } public static String getSchemePrefix() { return (isSecure()) ? "https://" : "http://"; } + public static String getScheme(Policy policy) { + return policy == Policy.HTTPS_ONLY ? "https://" : "http://"; + } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java index 880804e..e5fd4b0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java @@ -54,7 +54,7 @@ @Before public void setup() throws Exception { - HttpConfig.setSecure(true); + HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY); File base = new File(BASEDIR); FileUtil.fullyDelete(base); base.mkdirs(); @@ -89,7 +89,7 @@ public void cleanup() throws Exception { String classpathDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class); new File(classpathDir, CONFIG_SITE_XML).delete(); - HttpConfig.setSecure(false); + HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index 9e03812..1509cb5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java 
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -18,60 +18,29 @@ package org.apache.hadoop.mapreduce.v2.app; -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicBoolean; - +import com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.mapred.FileOutputCommitter; -import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.LocalContainerLauncher; -import org.apache.hadoop.mapred.TaskAttemptListenerImpl; -import org.apache.hadoop.mapred.TaskUmbilicalProtocol; -import org.apache.hadoop.mapreduce.MRConfig; -import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.hadoop.mapred.*; +import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.TypeConverter; -import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent; -import org.apache.hadoop.mapreduce.jobhistory.EventReader; -import 
org.apache.hadoop.mapreduce.jobhistory.EventType; -import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryCopyService; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser; +import org.apache.hadoop.mapreduce.jobhistory.*; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; -import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.TaskId; -import org.apache.hadoop.mapreduce.v2.api.records.TaskState; +import org.apache.hadoop.mapreduce.v2.api.records.*; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.client.MRClientService; @@ -82,30 +51,17 @@ import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; -import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent; -import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType; -import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent; -import org.apache.hadoop.mapreduce.v2.app.job.event.JobStartEvent; -import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; -import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; -import 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent; -import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType; +import org.apache.hadoop.mapreduce.v2.app.job.event.*; import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl; import org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics; -import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; -import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; -import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator; -import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator; -import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor; -import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler; +import org.apache.hadoop.mapreduce.v2.app.rm.*; import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator; import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator; import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent; -import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; @@ -139,7 +95,14 @@ import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.SystemClock; -import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.security.PrivilegedExceptionAction; +import java.util.*; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicBoolean; /** * The 
Map-Reduce Application Master. @@ -1351,7 +1314,7 @@ public static void main(String[] args) { // RM/NM to issue SSL certificates but definitely not MR-AM as it is // running in user-land. MRWebAppUtil.initialize(conf); - HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInMRAM()); + HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY); // log the system properties String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf); if (systemPropsToLog != null) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java index e7986d4..46c0125 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java @@ -18,14 +18,9 @@ package org.apache.hadoop.mapreduce.v2.jobhistory; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; /** * Stores Job History configuration keys that can be set by administrators of @@ -129,10 +124,11 @@ public static final String MR_HISTORY_PRINCIPAL = MR_HISTORY_PREFIX + "principal"; - /** To enable SSL in MR history server */ - public static final String MR_HS_SSL_ENABLED = MR_HISTORY_PREFIX - + "ssl.enabled"; - public static boolean DEFAULT_MR_HS_SSL_ENABLED = false; + /** To enable https in MR history server */ + 
public static final String MR_HS_HTTP_POLICY = MR_HISTORY_PREFIX + + "http.policy"; + public static String MR_HS_HTTP_POLICY_DEFAULT = + CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY; /**The address the history server webapp is on.*/ public static final String MR_HISTORY_WEBAPP_ADDRESS = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java index 095d25b..5bbfd92 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java @@ -17,24 +17,25 @@ */ package org.apache.hadoop.mapreduce.v2.util; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.Iterator; - +import com.google.common.base.Joiner; +import com.google.common.base.Splitter; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.mapreduce.JobID; -import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.conf.YarnConfiguration; -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; +import java.net.InetAddress; +import 
java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Iterator; + +import static org.apache.hadoop.http.HttpConfig.Policy; @Private @Evolving @@ -42,63 +43,40 @@ private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults(); private static final Joiner JOINER = Joiner.on(""); - private static boolean isSSLEnabledInYARN; - private static boolean isSSLEnabledInJHS; - private static boolean isSSLEnabledInMRAM; - + private static Policy httpPolicyInYarn; + private static Policy httpPolicyInJHS; + public static void initialize(Configuration conf) { - setSSLEnabledInYARN(conf.getBoolean( - CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY, - CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT)); - setSSLEnabledInJHS(conf.getBoolean(JHAdminConfig.MR_HS_SSL_ENABLED, - JHAdminConfig.DEFAULT_MR_HS_SSL_ENABLED)); - setSSLEnabledInMRAM(conf.getBoolean(MRConfig.SSL_ENABLED_KEY, - MRConfig.SSL_ENABLED_KEY_DEFAULT)); - } - - private static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) { - MRWebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN; + setHttpPolicyInYARN(conf.get( + YarnConfiguration.YARN_HTTP_POLICY_KEY, + YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)); + setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY, + JHAdminConfig.MR_HS_HTTP_POLICY_DEFAULT)); } - private static void setSSLEnabledInJHS(boolean isSSLEnabledInJHS) { - MRWebAppUtil.isSSLEnabledInJHS = isSSLEnabledInJHS; - } - - private static void setSSLEnabledInMRAM(boolean isSSLEnabledInMRAM) { - MRWebAppUtil.isSSLEnabledInMRAM = isSSLEnabledInMRAM; - } - - public static boolean isSSLEnabledInYARN() { - return isSSLEnabledInYARN; + private static void setHttpPolicyInYARN(String policy) { + MRWebAppUtil.httpPolicyInYarn = Policy.fromString(policy); } - public static boolean isSSLEnabledInJHS() { - return isSSLEnabledInJHS; + private static void setHttpPolicyInJHS(String policy) { + MRWebAppUtil.httpPolicyInJHS = Policy.fromString(policy); } - 
public static boolean isSSLEnabledInMRAM() { - return isSSLEnabledInMRAM; + public static Policy getJHSHttpPolicy() { + return MRWebAppUtil.httpPolicyInJHS; } public static String getYARNWebappScheme() { - if (isSSLEnabledInYARN) { - return "https://"; - } else { - return "http://"; - } + return HttpConfig.getScheme(httpPolicyInYarn); } public static String getJHSWebappScheme() { - if (isSSLEnabledInJHS) { - return "https://"; - } else { - return "http://"; - } + return HttpConfig.getScheme(httpPolicyInJHS); } public static void setJHSWebappURLWithoutScheme(Configuration conf, String hostAddress) { - if (isSSLEnabledInJHS) { + if (httpPolicyInJHS == Policy.HTTPS_ONLY) { conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress); } else { conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress); @@ -106,7 +84,7 @@ public static void setJHSWebappURLWithoutScheme(Configuration conf, } public static String getJHSWebappURLWithoutScheme(Configuration conf) { - if (isSSLEnabledInJHS) { + if (httpPolicyInJHS == Policy.HTTPS_ONLY) { return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS); } else { @@ -120,7 +98,7 @@ public static String getJHSWebappURLWithScheme(Configuration conf) { } public static InetSocketAddress getJHSWebBindAddress(Configuration conf) { - if (isSSLEnabledInJHS) { + if (httpPolicyInJHS == Policy.HTTPS_ONLY) { return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT); @@ -168,26 +146,18 @@ public static String getApplicationWebURLOnJHSWithScheme(Configuration conf, } private static int getDefaultJHSWebappPort() { - if (isSSLEnabledInJHS) { - return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT; - } else { - return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT; - } + return httpPolicyInJHS == Policy.HTTPS_ONLY ? 
+ JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT: + JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT; } private static String getDefaultJHSWebappURLWithoutScheme() { - if (isSSLEnabledInJHS) { - return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS; - } else { - return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS; - } + return httpPolicyInJHS == Policy.HTTPS_ONLY ? + JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS : + JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS; } - + public static String getAMWebappScheme(Configuration conf) { - if (isSSLEnabledInMRAM) { - return "https://"; - } else { - return "http://"; - } + return "http://"; } } \ No newline at end of file diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java index 830bb44..bbac5fc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java @@ -84,11 +84,6 @@ "mapreduce.shuffle.ssl.enabled"; public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false; - - public static final String SSL_ENABLED_KEY = - "mapreduce.am.ssl.enabled"; - - public static final boolean SSL_ENABLED_KEY_DEFAULT = false; public static final String SHUFFLE_CONSUMER_PLUGIN = "mapreduce.job.reduce.shuffle.consumer.plugin.class"; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml index 657805d..0f7958b 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml @@ -290,20 +290,6 @@ - mapreduce.am.ssl.enabled - false - - If enabled, MapReduce application master's http server will be - started with SSL enabled. Map reduce AM by default doesn't support SSL. - If MapReduce jobs want SSL support, it is the user's responsibility to - create and manage certificates, keystores and trust-stores with appropriate - permissions. This is only for MapReduce application master and is not used - by job history server. To enable encrypted shuffle this property is not - required, instead refer to (mapreduce.shuffle.ssl.enabled) property. - - - - mapreduce.shuffle.ssl.file.buffer.size 65536 Buffer size for reading spills from file when using SSL. @@ -1226,11 +1212,13 @@ - mapreduce.jobhistory.ssl.enabled - false + mapreduce.jobhistory.http.policy + HTTP_ONLY - Whether to use SSL for the HTTP endpoints. If set to true, the - JobHistoryServer web UIs will be served over HTTPS instead HTTP. + This configures the HTTP endpoint for JobHistoryServer web UI. 
+ The following values are supported: + - HTTP_ONLY : Service is provided only on http + - HTTPS_ONLY : Service is provided only on https diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/EncryptedShuffle.apt.vm b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/EncryptedShuffle.apt.vm index e05951c..63f80eb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/EncryptedShuffle.apt.vm +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/apt/EncryptedShuffle.apt.vm @@ -107,7 +107,7 @@ Hadoop MapReduce Next Generation - Encrypted Shuffle *--------------------------------------+---------------------+-----------------+ | <> | <> | <> | *--------------------------------------+---------------------+-----------------+ -| <<>> | <<>> | Whether encrypted shuffle is enabled | +| <<>> | <<>> | Whether encrypted shuffle is enabled | *--------------------------------------+---------------------+-----------------+ <> This property should be marked as final in the cluster @@ -118,7 +118,7 @@ Hadoop MapReduce Next Generation - Encrypted Shuffle ------ ... 
- mapreduce.shuffle.ssl.enabled + mapreduce.shuffle.ssl.enabled true true diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java index 168d75d..a70592e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java @@ -18,13 +18,10 @@ package org.apache.hadoop.mapreduce.v2.hs; -import java.io.IOException; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.MRConfig; @@ -46,6 +43,8 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService; +import java.io.IOException; + /****************************************************************** * {@link JobHistoryServer} is responsible for servicing all job history * related requests from client. @@ -120,7 +119,7 @@ protected void serviceInit(Configuration conf) throws Exception { // This is required for WebApps to use https if enabled. 
MRWebAppUtil.initialize(getConfig()); - HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInJHS()); + HttpConfig.setPolicy(MRWebAppUtil.getJHSHttpPolicy()); try { doSecureLogin(conf); } catch(IOException ie) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index dc5baa1..6005071 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -18,19 +18,15 @@ package org.apache.hadoop.yarn.conf; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.Arrays; - import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.ApplicationConstants; +import java.util.Arrays; + @Public @Evolving public class YarnConfiguration extends Configuration { @@ -843,7 +839,12 @@ public static final String NM_CLIENT_MAX_NM_PROXIES = YARN_PREFIX + "client.max-nodemanagers-proxies"; public static final int DEFAULT_NM_CLIENT_MAX_NM_PROXIES = 500; - + + public static final String YARN_HTTP_POLICY_KEY = + YARN_PREFIX + "http.policy"; + public static final String YARN_HTTP_POLICY_DEFAULT = + CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY; + public YarnConfiguration() { super(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml index 171b118..9696042 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml @@ -100,6 +100,17 @@ + + This configures the HTTP endpoint for Yarn Daemons. The following + values are supported: + - HTTP_ONLY : Service is provided only on http + - HTTPS_ONLY : Service is provided only on https + + yarn.http.policy + HTTP_ONLY + + + The http address of the RM web application. yarn.resourcemanager.webapp.address ${yarn.resourcemanager.hostname}:8088 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java index 2be18d3..4481f60 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java @@ -18,7 +18,11 @@ package org.apache.hadoop.yarn.server.webproxy; -import static org.apache.hadoop.yarn.util.StringHelper.ujoin; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.http.HttpConfig; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.util.TrackingUriPlugin; import java.io.UnsupportedEncodingException; import java.net.URI; @@ -26,11 +30,7 @@ import java.net.URLEncoder; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.http.HttpConfig; -import 
org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.TrackingUriPlugin; +import static org.apache.hadoop.yarn.util.StringHelper.ujoin; public class ProxyUriUtils { @SuppressWarnings("unused") @@ -148,9 +148,9 @@ public static URI getUriFromAMUrl(String url) /* * check is made to make sure if AM reports with scheme then it will be * used by default otherwise it will default to the one configured using - * "hadoop.ssl.enabled". + * "yarn.http.policy". */ - return new URI(HttpConfig.getSchemePrefix() + url); + return new URI(HttpConfig.getSchemePrefix() + url); } else { return new URI(url); } @@ -168,9 +168,9 @@ public static URI getUriFromAMUrl(String scheme, String noSchemeUrl) /* * check is made to make sure if AM reports with scheme then it will be * used by default otherwise it will default to the one configured using - * "hadoop.ssl.enabled". + * "yarn.http.policy". */ - return new URI(scheme + "://" + noSchemeUrl); + return new URI(scheme + "://" + noSchemeUrl); } else { return new URI(noSchemeUrl); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java index 7f81f9b..4d949f2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java @@ -18,9 +18,6 @@ package org.apache.hadoop.yarn.server.webproxy.amfilter; -import java.util.HashMap; -import java.util.Map; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; 
import org.apache.hadoop.http.FilterInitializer; @@ -28,6 +25,9 @@ import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; +import java.util.HashMap; +import java.util.Map; + public class AmFilterInitializer extends FilterInitializer { private static final String FILTER_NAME = "AM_PROXY_FILTER"; private static final String FILTER_CLASS = AmIpFilter.class.getCanonicalName(); @@ -38,6 +38,7 @@ public void initFilter(FilterContainer container, Configuration conf) { String proxy = WebAppUtils.getProxyHostAndPort(conf); String[] parts = proxy.split(":"); params.put(AmIpFilter.PROXY_HOST, parts[0]); + // The proxy URI scheme follows the global HttpConfig policy configured for YARN daemons. params.put(AmIpFilter.PROXY_URI_BASE, HttpConfig.getSchemePrefix() + proxy + System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV));