diff --git common/src/java/org/apache/hadoop/hive/common/LogUtils.java common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 599e798..c2a0d9a 100644
--- common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -42,6 +42,12 @@
   private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.properties";
   private static final Logger l4j = LoggerFactory.getLogger(LogUtils.class);
 
+  /**
+   * Constants for log masking
+   */
+  private static String KEY_TO_MASK_WITH = "password";
+  private static String MASKED_VALUE = "###_MASKED_###";
+
   @SuppressWarnings("serial")
   public static class LogInitializationException extends Exception {
     public LogInitializationException(String msg) {
@@ -171,4 +177,20 @@ private static void logConfigLocation(HiveConf conf) throws LogInitializationExc
           + conf.getHiveSiteLocation().getPath());
     }
   }
+
+  /**
+   * Returns MASKED_VALUE if the key contains KEY_TO_MASK_WITH, or the original value otherwise.
+   * Used to mask environment variables and properties in logs which contain passwords.
+   * @param key The property key to check
+   * @param value The original value of the property
+   * @return The masked property value
+   */
+  public static String maskIfPassword(String key, String value) {
+    if (key!=null && value!=null) {
+      if (key.toLowerCase().indexOf(KEY_TO_MASK_WITH) != -1) {
+        return MASKED_VALUE;
+      }
+    }
+    return value;
+  }
 }
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 178afbc..5a29684 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3777,8 +3777,8 @@ private void initialize(Class<?> cls) {
 
     // setup list of conf vars that are not allowed to change runtime
     setupRestrictList();
-    setupHiddenSet();
-
+    hiddenSet.clear();
+    hiddenSet.addAll(HiveConfUtil.getHiddenSet(this));
   }
 
   /**
@@ -4130,25 +4130,11 @@ private void setupRestrictList() {
     restrictList.add(ConfVars.HIVE_CONF_INTERNAL_VARIABLE_LIST.varname);
   }
 
-  private void setupHiddenSet() {
-    String hiddenListStr = this.getVar(ConfVars.HIVE_CONF_HIDDEN_LIST);
-    hiddenSet.clear();
-    if (hiddenListStr != null) {
-      for (String entry : hiddenListStr.split(",")) {
-        hiddenSet.add(entry.trim());
-      }
-    }
-  }
-
   /**
    * Strips hidden config entries from configuration
    */
   public void stripHiddenConfigurations(Configuration conf) {
-    for (String name : hiddenSet) {
-      if (conf.get(name) != null) {
-        conf.set(name, "");
-      }
-    }
+    HiveConfUtil.stripConfigurations(conf, hiddenSet);
   }
 
   /**
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
index 073a978..16c2eaf 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -25,9 +25,11 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.StringTokenizer;
 
 /**
@@ -59,7 +61,42 @@ public static StringBuilder dumpConfig(HiveConf conf) {
     dumpConfig(conf, sb);
     return sb.append("END========\"new HiveConf()\"========\n");
   }
-  public static void dumpConfig(Configuration conf, StringBuilder sb) {
+
+  /**
+   * Gets the set of the hidden configurations
+   * @param configuration The original configuration
+   * @return The set of configuration values to hide
+   */
+  public static Set<String> getHiddenSet(Configuration configuration) {
+    Set<String> hiddenSet = new HashSet<String>();
+    String hiddenListStr = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST);
+    if (hiddenListStr != null) {
+      for (String entry : hiddenListStr.split(",")) {
+        hiddenSet.add(entry.trim());
+      }
+    }
+    return hiddenSet;
+  }
+
+  /**
+   * Strips hidden config entries from configuration
+   * @param conf The configuration to strip from
+   * @param hiddenSet The values to strip
+   */
+  public static void stripConfigurations(Configuration conf, Set<String> hiddenSet) {
+    for (String name : hiddenSet) {
+      if (conf.get(name) != null) {
+        conf.set(name, "");
+      }
+    }
+  }
+
+  public static void dumpConfig(Configuration originalConf, StringBuilder sb) {
+    Set<String> hiddenSet = getHiddenSet(originalConf);
+    sb.append("Values omitted for security reason if present: ").append(hiddenSet).append("\n");
+    Configuration conf = new Configuration(originalConf);
+    stripConfigurations(conf, hiddenSet);
+
     Iterator<Map.Entry<String, String>> configIter = conf.iterator();
     List<Map.Entry<String, String>> configVals = new ArrayList<>();
     while(configIter.hasNext()) {
diff --git common/src/test/org/apache/hadoop/hive/common/TestLogUtils.java common/src/test/org/apache/hadoop/hive/common/TestLogUtils.java
new file mode 100644
index 0000000..923ac2d
--- /dev/null
+++ common/src/test/org/apache/hadoop/hive/common/TestLogUtils.java
@@ -0,0 +1,34 @@
+package org.apache.hadoop.hive.common;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestLogUtils {
+  @Test
+  public void testMaskIfPassword() {
+    Assert.assertNull(LogUtils.maskIfPassword("",null));
+    Assert.assertNull(LogUtils.maskIfPassword(null,null));
+    Assert.assertEquals("test", LogUtils.maskIfPassword(null,"test"));
+    Assert.assertEquals("test2", LogUtils.maskIfPassword("any","test2"));
+    Assert.assertEquals("###_MASKED_###", LogUtils.maskIfPassword("password","test3"));
+    Assert.assertEquals("###_MASKED_###", LogUtils.maskIfPassword("a_passWord","test4"));
+    Assert.assertEquals("###_MASKED_###", LogUtils.maskIfPassword("password_a","test5"));
+    Assert.assertEquals("###_MASKED_###", LogUtils.maskIfPassword("a_PassWord_a","test6"));
+  }
+}
diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
index dd1208b..54d0907 100644
--- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
+++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
@@ -200,14 +200,19 @@ private void init() {
    * been installed. We need pass some properties to that client to make sure it connects to the
    * right Metastore, configures Tez, etc. Here we look for such properties in hive config,
    * and set a comma-separated list of key values in {@link #HIVE_PROPS_NAME}.
+   * The HIVE_CONF_HIDDEN_LIST should be handled separately too: it should also be copied from
+   * the hive config to the webhcat config if not defined there.
    * Note that the user may choose to set the same keys in HIVE_PROPS_NAME directly, in which case
    * those values should take precedence.
    */
   private void handleHiveProperties() {
     HiveConf hiveConf = new HiveConf();//load hive-site.xml from classpath
     List<String> interestingPropNames = Arrays.asList(
-      "hive.metastore.uris","hive.metastore.sasl.enabled",
-      "hive.metastore.execute.setugi","hive.execution.engine");
+      HiveConf.ConfVars.METASTOREURIS.varname,
+      HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname,
+      HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname,
+      HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname,
+      HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname);
 
     //each items is a "key=value" format
     List<String> webhcatHiveProps = new ArrayList<String>(hiveProps());
@@ -232,6 +237,12 @@ private void handleHiveProperties() {
       hiveProps.append(hiveProps.length() > 0 ? "," : "").append(StringUtils.escapeString(whProp));
     }
     set(HIVE_PROPS_NAME, hiveProps.toString());
+    // Setting the hidden list
+    String hiddenProperties = hiveConf.get(HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname);
+    if (this.get(HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname) == null
+        && hiddenProperties!=null) {
+      set(HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.varname, hiddenProperties);
+    }
   }
 
   private static void logConfigLoadAttempt(String path) {
diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
index 83584d3..f629a78 100644
--- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
+++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
@@ -42,6 +42,7 @@
 
 import javax.ws.rs.core.UriBuilder;
 
+import org.apache.hadoop.hive.common.LogUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -483,7 +484,8 @@ public static StringBuilder dumpPropMap(String header, Map<String, String> map)
         }
       }
       else {
-        sb.append(propKey).append('=').append(map.get(propKey)).append('\n');
+        sb.append(propKey).append('=').append(LogUtils.maskIfPassword(propKey, map.get(propKey)));
+        sb.append('\n');
       }
     }
     return sb.append("END").append(header).append('\n');
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 202adf3..8f7bbb2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -225,13 +225,6 @@
   public static String REDUCENAME = "Reducer ";
 
   /**
-   * Constants for log masking
-   */
-  private static String KEY_TO_MASK_WITH = "password";
-  private static String MASKED_VALUE = "###_MASKED_###";
-
-
-  /**
    * ReduceField:
    * KEY: record key
    * VALUE: record value
@@ -3704,20 +3697,4 @@ public static boolean checkLlapIOSupportedTypes(final List<String> readColumnNam
     }
     return result;
   }
-
-  /**
-   * Returns MASKED_VALUE if the key contains KEY_TO_MASK_WITH or the original property otherwise.
-   * Used to mask environment variables, and properties in logs which contain passwords
-   * @param key The property key to check
-   * @param value The original value of the property
-   * @return The masked property value
-   */
-  public static String maskIfPassword(String key, String value) {
-    if (key!=null && value!=null) {
-      if (key.toLowerCase().indexOf(KEY_TO_MASK_WITH) != -1) {
-        return MASKED_VALUE;
-      }
-    }
-    return value;
-  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index d4b17d7..127d3e6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.io.CachingPrintStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -309,7 +310,7 @@ public int executeInChildVM(DriverContext driverContext) {
       String name = entry.getKey();
       String value = entry.getValue();
       env[pos++] = name + "=" + value;
-      LOG.debug("Setting env: " + name + "=" + Utilities.maskIfPassword(name, value));
+      LOG.debug("Setting env: " + name + "=" + LogUtils.maskIfPassword(name, value));
     }
 
     LOG.info("Executing: " + cmdLine);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index ed87adb..8867415 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -28,6 +28,7 @@
 import org.apache.commons.compress.utils.CharsetNames;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -99,7 +100,7 @@ public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Ex
           sparkConf.put(propertyName, properties.getProperty(propertyName));
           LOG.info(String.format(
             "load spark property from %s (%s -> %s).",
-            SPARK_DEFAULT_CONF_FILE, propertyName, Utilities.maskIfPassword(propertyName,value)));
+            SPARK_DEFAULT_CONF_FILE, propertyName, LogUtils.maskIfPassword(propertyName,value)));
         }
       }
     }
@@ -136,7 +137,7 @@ public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Ex
         sparkConf.put(propertyName, value);
         LOG.info(String.format(
           "load spark property from hive configuration (%s -> %s).",
-          propertyName, Utilities.maskIfPassword(propertyName,value)));
+          propertyName, LogUtils.maskIfPassword(propertyName,value)));
       } else if (propertyName.startsWith("yarn") &&
         (sparkMaster.equals("yarn-client") || sparkMaster.equals("yarn-cluster"))) {
         String value = hiveConf.get(propertyName);
@@ -146,7 +147,7 @@ public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Ex
         sparkConf.put("spark.hadoop." + propertyName, value);
         LOG.info(String.format(
           "load yarn property from hive configuration in %s mode (%s -> %s).",
-          sparkMaster, propertyName, Utilities.maskIfPassword(propertyName,value)));
+          sparkMaster, propertyName, LogUtils.maskIfPassword(propertyName,value)));
       } else if (propertyName.equals(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)) {
         String value = hiveConf.get(propertyName);
         if (value != null && !value.isEmpty()) {
@@ -159,7 +160,7 @@ public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Ex
         String value = hiveConf.get(propertyName);
         sparkConf.put("spark.hadoop." + propertyName, value);
         LOG.info(String.format(
-          "load HBase configuration (%s -> %s).", propertyName, Utilities.maskIfPassword(propertyName,value)));
+          "load HBase configuration (%s -> %s).", propertyName, LogUtils.maskIfPassword(propertyName,value)));
       }
 
       if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) {
@@ -167,7 +168,7 @@
         String value = hiveConf.get(propertyName);
         sparkConf.put(propertyName, value);
         LOG.info(String.format(
           "load RPC property from hive configuration (%s -> %s).",
-          propertyName, Utilities.maskIfPassword(propertyName,value)));
+          propertyName, LogUtils.maskIfPassword(propertyName,value)));
       }
     }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index b095608..d2060a1 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -246,16 +246,4 @@ private Path setupTempDirWithSingleOutputFile(Configuration hconf) throws IOExce
     FileSystem.getLocal(hconf).create(taskOutputPath).close();
     return tempDirPath;
   }
-
-  @Test
-  public void testMaskIfPassword() {
-    Assert.assertNull(Utilities.maskIfPassword("",null));
-    Assert.assertNull(Utilities.maskIfPassword(null,null));
-    Assert.assertEquals("test",Utilities.maskIfPassword(null,"test"));
-    Assert.assertEquals("test2",Utilities.maskIfPassword("any","test2"));
-    Assert.assertEquals("###_MASKED_###",Utilities.maskIfPassword("password","test3"));
-    Assert.assertEquals("###_MASKED_###",Utilities.maskIfPassword("a_passWord","test4"));
-    Assert.assertEquals("###_MASKED_###",Utilities.maskIfPassword("password_a","test5"));
-    Assert.assertEquals("###_MASKED_###",Utilities.maskIfPassword("a_PassWord_a","test6"));
-  }
 }
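
For illustration only (this note and the snippet below are not part of the patch): a minimal sketch of how the relocated helpers are intended to be used together once the patch applies, assuming the patched hive-common classes and Hadoop are on the classpath. The class name MaskingSketch and the sample JDO password key are hypothetical; the calls themselves are the APIs introduced above (LogUtils.maskIfPassword, HiveConfUtil.getHiddenSet, HiveConfUtil.stripConfigurations) plus Hadoop's Configuration copy constructor.

import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfUtil;

// Illustrative sketch only; not part of the patch.
public class MaskingSketch {
  public static void main(String[] args) {
    // Per-entry masking: any key containing "password" (case-insensitive) gets its value replaced.
    System.out.println(LogUtils.maskIfPassword("javax.jdo.option.ConnectionPassword", "secret"));
    // prints ###_MASKED_###
    System.out.println(LogUtils.maskIfPassword("hive.execution.engine", "mr"));
    // prints mr

    // Whole-configuration stripping, driven by HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST.
    HiveConf hiveConf = new HiveConf();
    Set<String> hidden = HiveConfUtil.getHiddenSet(hiveConf);
    Configuration copy = new Configuration(hiveConf); // copy so the live conf keeps its values
    HiveConfUtil.stripConfigurations(copy, hidden);   // hidden entries are blanked to ""
    // 'copy' is now safe to dump into logs, as the patched HiveConfUtil.dumpConfig does above.
  }
}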