diff --git hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index ab30003..5bedadd 100644
--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -264,5 +264,9 @@
 
   /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
   public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
       60;
+
+  // HTTP policies to be used in configuration
+  public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
+  public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
 }
diff --git hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
index d9e219a..fe3e5ae 100644
--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
+++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
@@ -28,25 +28,41 @@
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class HttpConfig {
-  private static boolean sslEnabled;
+  private static Policy policy;
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY;
+
+    public static Policy fromString(String value) {
+      if (value.equalsIgnoreCase(CommonConfigurationKeysPublic
+          .HTTP_POLICY_HTTPS_ONLY)) {
+        return HTTPS_ONLY;
+      }
+      return HTTP_ONLY;
+    }
+  }
 
   static {
     Configuration conf = new Configuration();
-    sslEnabled = conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+    boolean sslEnabled = conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
   }
 
-  public static void setSecure(boolean secure) {
-    sslEnabled = secure;
+  public static void setPolicy(Policy policy) {
+    HttpConfig.policy = policy;
   }
 
   public static boolean isSecure() {
-    return sslEnabled;
+    return policy == Policy.HTTPS_ONLY;
   }
 
   public static String getSchemePrefix() {
     return (isSecure()) ? "https://" : "http://";
   }
+  public static String getScheme(Policy policy) {
+    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
+  }
 }
diff --git hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
index 880804e..e5fd4b0 100644
--- hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
+++ hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
@@ -54,7 +54,7 @@
 
   @Before
   public void setup() throws Exception {
-    HttpConfig.setSecure(true);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
     File base = new File(BASEDIR);
     FileUtil.fullyDelete(base);
     base.mkdirs();
@@ -89,7 +89,7 @@
   public void cleanup() throws Exception {
     String classpathDir =
         KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
     new File(classpathDir, CONFIG_SITE_XML).delete();
-    HttpConfig.setSecure(false);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
   }
 
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
index 9e03812..1509cb5 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
@@ -18,60 +18,29 @@
 
 package org.apache.hadoop.mapreduce.v2.app;
 
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicBoolean;
-
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.mapred.FileOutputCommitter;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.LocalContainerLauncher;
-import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
-import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
-import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent;
-import org.apache.hadoop.mapreduce.jobhistory.EventReader;
-import org.apache.hadoop.mapreduce.jobhistory.EventType;
-import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryCopyService;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.*;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.*;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
@@ -82,30 +51,17 @@
 import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.JobStartEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
-import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
+import org.apache.hadoop.mapreduce.v2.app.job.event.*;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
 import org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
-import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
-import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor;
-import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
+import org.apache.hadoop.mapreduce.v2.app.rm.*;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -139,7 +95,14 @@
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.SystemClock;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.security.PrivilegedExceptionAction;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
  * The Map-Reduce Application Master.
@@ -1351,7 +1314,7 @@ public static void main(String[] args) {
       // RM/NM to issue SSL certificates but definitely not MR-AM as it is
       // running in user-land.
       MRWebAppUtil.initialize(conf);
-      HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInMRAM());
+      HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
       // log the system properties
       String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf);
       if (systemPropsToLog != null) {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
index bb188c0..d7929cc 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
@@ -63,7 +63,8 @@ protected AppController(App app, Configuration conf, RequestContext ctx,
     set(APP_ID, app.context.getApplicationID().toString());
     set(RM_WEB,
         JOINER.join(MRWebAppUtil.getYARNWebappScheme(),
-            WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf)));
+            WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf,
+                MRWebAppUtil.getYARNHttpPolicy())));
   }
 
   @Inject
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
index e7986d4..ee7dae9 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
@@ -18,14 +18,9 @@
 
 package org.apache.hadoop.mapreduce.v2.jobhistory;
 
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Stores Job History configuration keys that can be set by administrators of
@@ -129,10 +124,11 @@
   public static final String MR_HISTORY_PRINCIPAL =
       MR_HISTORY_PREFIX + "principal";
 
-  /** To enable SSL in MR history server */
-  public static final String MR_HS_SSL_ENABLED = MR_HISTORY_PREFIX
-      + "ssl.enabled";
-  public static boolean DEFAULT_MR_HS_SSL_ENABLED = false;
+  /** To enable https in MR history server */
+  public static final String MR_HS_HTTP_POLICY = MR_HISTORY_PREFIX
+      + "http.policy";
+  public static String DEFAULT_MR_HS_HTTP_POLICY =
+      CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
 
   /**The address the history server webapp is on.*/
   public static final String MR_HISTORY_WEBAPP_ADDRESS =
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
index 095d25b..49a0407 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
@@ -17,24 +17,25 @@
  */
 package org.apache.hadoop.mapreduce.v2.util;
 
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.util.Iterator;
-
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.Iterator;
+
+import static org.apache.hadoop.http.HttpConfig.Policy;
 
 @Private
 @Evolving
@@ -42,63 +43,44 @@
   private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
   private static final Joiner JOINER = Joiner.on("");
 
-  private static boolean isSSLEnabledInYARN;
-  private static boolean isSSLEnabledInJHS;
-  private static boolean isSSLEnabledInMRAM;
-
+  private static Policy httpPolicyInYarn;
+  private static Policy httpPolicyInJHS;
+
   public static void initialize(Configuration conf) {
-    setSSLEnabledInYARN(conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT));
-    setSSLEnabledInJHS(conf.getBoolean(JHAdminConfig.MR_HS_SSL_ENABLED,
-        JHAdminConfig.DEFAULT_MR_HS_SSL_ENABLED));
-    setSSLEnabledInMRAM(conf.getBoolean(MRConfig.SSL_ENABLED_KEY,
-        MRConfig.SSL_ENABLED_KEY_DEFAULT));
+    setHttpPolicyInYARN(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT));
+    setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
+        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
   }
 
-  private static void setSSLEnabledInYARN(boolean isSSLEnabledInYARN) {
-    MRWebAppUtil.isSSLEnabledInYARN = isSSLEnabledInYARN;
+  private static void setHttpPolicyInJHS(String policy) {
+    MRWebAppUtil.httpPolicyInJHS = Policy.fromString(policy);
   }
 
-  private static void setSSLEnabledInJHS(boolean isSSLEnabledInJHS) {
-    MRWebAppUtil.isSSLEnabledInJHS = isSSLEnabledInJHS;
+  private static void setHttpPolicyInYARN(String policy) {
+    MRWebAppUtil.httpPolicyInYarn = Policy.fromString(policy);
   }
 
-  private static void setSSLEnabledInMRAM(boolean isSSLEnabledInMRAM) {
-    MRWebAppUtil.isSSLEnabledInMRAM = isSSLEnabledInMRAM;
+  public static Policy getJHSHttpPolicy() {
+    return MRWebAppUtil.httpPolicyInJHS;
   }
 
-  public static boolean isSSLEnabledInYARN() {
-    return isSSLEnabledInYARN;
-  }
-
-  public static boolean isSSLEnabledInJHS() {
-    return isSSLEnabledInJHS;
-  }
-
-  public static boolean isSSLEnabledInMRAM() {
-    return isSSLEnabledInMRAM;
+  public static Policy getYARNHttpPolicy() {
+    return MRWebAppUtil.httpPolicyInYarn;
   }
 
   public static String getYARNWebappScheme() {
-    if (isSSLEnabledInYARN) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return HttpConfig.getScheme(httpPolicyInYarn);
   }
 
   public static String getJHSWebappScheme() {
-    if (isSSLEnabledInJHS) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return HttpConfig.getScheme(httpPolicyInJHS);
   }
 
   public static void setJHSWebappURLWithoutScheme(Configuration conf,
       String hostAddress) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
       conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS, hostAddress);
     } else {
       conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, hostAddress);
@@ -106,7 +88,7 @@ public static void setJHSWebappURLWithoutScheme(Configuration conf,
   }
 
   public static String getJHSWebappURLWithoutScheme(Configuration conf) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
       return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
           JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS);
     } else {
      return conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
@@ -120,7 +102,7 @@ public static String getJHSWebappURLWithScheme(Configuration conf) {
   }
 
   public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
-    if (isSSLEnabledInJHS) {
+    if (httpPolicyInJHS == Policy.HTTPS_ONLY) {
      return conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
          JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
@@ -168,26 +150,18 @@ public static String getApplicationWebURLOnJHSWithScheme(Configuration conf,
   }
 
   private static int getDefaultJHSWebappPort() {
-    if (isSSLEnabledInJHS) {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT;
-    } else {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
-    }
+    return httpPolicyInJHS == Policy.HTTPS_ONLY ?
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT:
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT;
   }
 
   private static String getDefaultJHSWebappURLWithoutScheme() {
-    if (isSSLEnabledInJHS) {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS;
-    } else {
-      return JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
-    }
+    return httpPolicyInJHS == Policy.HTTPS_ONLY ?
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS :
+        JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS;
   }
-
+
   public static String getAMWebappScheme(Configuration conf) {
-    if (isSSLEnabledInMRAM) {
-      return "https://";
-    } else {
-      return "http://";
-    }
+    return "http://";
   }
 }
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
index 830bb44..bbac5fc 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
@@ -84,11 +84,6 @@
       "mapreduce.shuffle.ssl.enabled";
 
   public static final boolean SHUFFLE_SSL_ENABLED_DEFAULT = false;
-
-  public static final String SSL_ENABLED_KEY =
-      "mapreduce.am.ssl.enabled";
-
-  public static final boolean SSL_ENABLED_KEY_DEFAULT = false;
 
   public static final String SHUFFLE_CONSUMER_PLUGIN =
       "mapreduce.job.reduce.shuffle.consumer.plugin.class";
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index 0e1b21e..598d106 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -290,20 +290,6 @@
 </property>
 
 <property>
-  <name>mapreduce.am.ssl.enabled</name>
-  <value>false</value>
-  <description>
-    If enabled, MapReduce application master's http server will be
-    started with SSL enabled. Map reduce AM by default doesn't support SSL.
-    If MapReduce jobs want SSL support, it is the user's responsibility to
-    create and manage certificates, keystores and trust-stores with appropriate
-    permissions. This is only for MapReduce application master and is not used
-    by job history server. To enable encrypted shuffle this property is not
-    required, instead refer to (mapreduce.shuffle.ssl.enabled) property.
-  </description>
-</property>
-
-<property>
   <name>mapreduce.shuffle.ssl.file.buffer.size</name>
   <value>65536</value>
   <description>Buffer size for reading spills from file when using SSL.
@@ -1235,11 +1221,13 @@
 </property>
 
 <property>
-  <name>mapreduce.jobhistory.ssl.enabled</name>
-  <value>false</value>
+  <name>mapreduce.jobhistory.http.policy</name>
+  <value>HTTP_ONLY</value>
   <description>
-    Whether to use SSL for the HTTP endpoints. If set to true, the
-    JobHistoryServer web UIs will be served over HTTPS instead HTTP.
+    This configures the HTTP endpoint for JobHistoryServer web UI.
+    The following values are supported:
+    - HTTP_ONLY : Service is provided only on http
+    - HTTPS_ONLY : Service is provided only on https
  </description>
 </property>
 
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
index 168d75d..4fc84c9 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
@@ -24,7 +24,6 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRConfig;
@@ -120,7 +119,7 @@ protected void serviceInit(Configuration conf) throws Exception {
 
     // This is required for WebApps to use https if enabled.
     MRWebAppUtil.initialize(getConfig());
-    HttpConfig.setSecure(MRWebAppUtil.isSSLEnabledInJHS());
+    HttpConfig.setPolicy(MRWebAppUtil.getJHSHttpPolicy());
    try {
      doSecureLogin(conf);
    } catch(IOException ie) {
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 31f3442..2003e13 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
@@ -862,7 +863,12 @@
   public static final String NM_CLIENT_MAX_NM_PROXIES =
       YARN_PREFIX + "client.max-nodemanagers-proxies";
   public static final int DEFAULT_NM_CLIENT_MAX_NM_PROXIES = 500;
-
+
+  public static final String YARN_HTTP_POLICY_KEY =
+      YARN_PREFIX + "http.policy";
+  public static final String YARN_HTTP_POLICY_DEFAULT =
+      CommonConfigurationKeysPublic.HTTP_POLICY_HTTP_ONLY;
+
   public YarnConfiguration() {
     super();
   }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
index c340332..ede5501 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
@@ -97,8 +98,14 @@ public static String getResolvedRMWebAppURLWithScheme(Configuration conf) {
   }
 
   public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf) {
+    return getResolvedRMWebAppURLWithoutScheme(conf,
+        HttpConfig.isSecure() ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY);
+  }
+
+  public static String getResolvedRMWebAppURLWithoutScheme(Configuration conf,
+      Policy httpPolicy) {
     InetSocketAddress address = null;
-    if (HttpConfig.isSecure()) {
+    if (httpPolicy == Policy.HTTPS_ONLY) {
       address =
           conf.getSocketAddr(YarnConfiguration.RM_WEBAPP_HTTPS_ADDRESS,
               YarnConfiguration.DEFAULT_RM_WEBAPP_HTTPS_ADDRESS,
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 0127fcc..86501ad 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -100,6 +100,17 @@
   </property>
 
   <property>
+    <description>
+      This configures the HTTP endpoint for Yarn Daemons. The following
+      values are supported:
+      - HTTP_ONLY : Service is provided only on http
+      - HTTPS_ONLY : Service is provided only on https
+    </description>
+    <name>yarn.http.policy</name>
+    <value>HTTP_ONLY</value>
+  </property>
+
+  <property>
     <description>The http address of the RM web application.</description>
     <name>yarn.resourcemanager.webapp.address</name>
     <value>${yarn.resourcemanager.hostname}:8088</value>
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
index 79b9d7a..a59910b 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
@@ -28,6 +28,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.CompositeService;
@@ -394,9 +396,16 @@ public static void main(String[] args) {
     StringUtils.startupShutdownMessage(NodeManager.class, args, LOG);
     NodeManager nodeManager = new NodeManager();
     Configuration conf = new YarnConfiguration();
+    setHttpPolicy(conf);
     nodeManager.initAndStartNodeManager(conf, false);
   }
 
+  private static void setHttpPolicy(Configuration conf) {
+    HttpConfig.setPolicy(Policy.fromString(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
+  }
+
   @VisibleForTesting
   @Private
   public NodeStatusUpdater getNodeStatusUpdater() {
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
index 3a05921..e46c2bf 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;
@@ -935,6 +936,7 @@ public static void main(String argv[]) {
       ShutdownHookManager.get().addShutdownHook(
         new CompositeServiceShutdownHook(resourceManager),
         SHUTDOWN_HOOK_PRIORITY);
+      setHttpPolicy(conf);
       resourceManager.init(conf);
       resourceManager.start();
     } catch (Throwable t) {
@@ -942,4 +944,10 @@ public static void main(String argv[]) {
       System.exit(-1);
     }
   }
+
+  private static void setHttpPolicy(Configuration conf) {
+    HttpConfig.setPolicy(Policy.fromString(conf.get(
+        YarnConfiguration.YARN_HTTP_POLICY_KEY,
+        YarnConfiguration.YARN_HTTP_POLICY_DEFAULT)));
+  }
 }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java
index 2be18d3..4481f60 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/ProxyUriUtils.java
@@ -18,7 +18,11 @@
 
 package org.apache.hadoop.yarn.server.webproxy;
 
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.util.TrackingUriPlugin;
 
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
@@ -26,11 +30,7 @@
 import java.net.URLEncoder;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.TrackingUriPlugin;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
 
 public class ProxyUriUtils {
   @SuppressWarnings("unused")
@@ -148,9 +148,9 @@ public static URI getUriFromAMUrl(String url)
       /*
        * check is made to make sure if AM reports with scheme then it will be
        * used by default otherwise it will default to the one configured using
-       * "hadoop.ssl.enabled".
+       * "yarn.http.policy".
        */
-      return new URI(HttpConfig.getSchemePrefix() + url);
+      return new URI(HttpConfig.getSchemePrefix() + url);
     } else {
       return new URI(url);
     }
@@ -168,9 +168,9 @@ public static URI getUriFromAMUrl(String scheme, String noSchemeUrl)
       /*
        * check is made to make sure if AM reports with scheme then it will be
        * used by default otherwise it will default to the one configured using
-       * "hadoop.ssl.enabled".
+       * "yarn.http.policy".
       */
-      return new URI(scheme + "://" + noSchemeUrl);
+      return new URI(scheme + "://" + noSchemeUrl);
     } else {
       return new URI(noSchemeUrl);
     }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
index 7f81f9b..b75cde2 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmFilterInitializer.java
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.yarn.server.webproxy.amfilter;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
@@ -28,6 +25,9 @@
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
+import java.util.HashMap;
+import java.util.Map;
+
 public class AmFilterInitializer extends FilterInitializer {
   private static final String FILTER_NAME = "AM_PROXY_FILTER";
   private static final String FILTER_CLASS = AmIpFilter.class.getCanonicalName();