Index: ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java	(revision b7e8d0c9c40cd3541aa89228e20372e1fa863c3a)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java	(revision )
@@ -309,7 +309,7 @@
         String name = entry.getKey();
         String value = entry.getValue();
         env[pos++] = name + "=" + value;
-        LOG.debug("Setting env: " + env[pos-1]);
+        LOG.debug("Setting env: " + name + "=" + Utilities.maskIfPassword(name, value));
       }
 
       LOG.info("Executing: " + cmdLine);
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java	(revision b7e8d0c9c40cd3541aa89228e20372e1fa863c3a)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java	(revision )
@@ -221,6 +221,13 @@
   public static String REDUCENAME = "Reducer ";
 
   /**
+   * Constants used for masking password values in logs.
+   */
+  private static final String KEY_TO_MASK_WITH = "password";
+  private static final String MASKED_VALUE = "###_MASKED_###";
+
+
+  /**
    * ReduceField:
    * KEY: record key
    * VALUE: record value
@@ -3616,5 +3623,19 @@
     return rowObjectInspector;
   }
 
-
+  /**
+   * Returns MASKED_VALUE if the key contains KEY_TO_MASK_WITH, otherwise returns the original value.
+   * Used to mask environment variables and properties that may contain passwords before logging them.
+   * @param key The property key to check
+   * @param value The original value of the property
+   * @return The original value, or MASKED_VALUE if the key looks like a password key
+   */
+  public static String maskIfPassword(String key, String value) {
+    if (key != null && value != null) {
+      if (key.toLowerCase().contains(KEY_TO_MASK_WITH)) {
+        return MASKED_VALUE;
+      }
+    }
+    return value;
+  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java	(revision b7e8d0c9c40cd3541aa89228e20372e1fa863c3a)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java	(revision )
@@ -28,6 +28,7 @@
 
 import org.apache.commons.compress.utils.CharsetNames;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -98,7 +99,7 @@
           sparkConf.put(propertyName, properties.getProperty(propertyName));
           LOG.info(String.format(
             "load spark property from %s (%s -> %s).",
-            SPARK_DEFAULT_CONF_FILE, propertyName, value));
+            SPARK_DEFAULT_CONF_FILE, propertyName, Utilities.maskIfPassword(propertyName, value)));
         }
       }
     }
@@ -135,7 +136,7 @@
         sparkConf.put(propertyName, value);
         LOG.info(String.format(
           "load spark property from hive configuration (%s -> %s).",
-          propertyName, value));
+          propertyName, Utilities.maskIfPassword(propertyName, value)));
       } else if (propertyName.startsWith("yarn") &&
         (sparkMaster.equals("yarn-client") || sparkMaster.equals("yarn-cluster"))) {
         String value = hiveConf.get(propertyName);
@@ -145,7 +146,7 @@
         sparkConf.put("spark.hadoop." + propertyName, value);
         LOG.info(String.format(
           "load yarn property from hive configuration in %s mode (%s -> %s).",
-          sparkMaster, propertyName, value));
+          sparkMaster, propertyName, Utilities.maskIfPassword(propertyName, value)));
       } else if (propertyName.equals(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)) {
         String value = hiveConf.get(propertyName);
         if (value != null && !value.isEmpty()) {
@@ -158,7 +159,7 @@
         String value = hiveConf.get(propertyName);
         sparkConf.put("spark.hadoop." + propertyName, value);
         LOG.info(String.format(
-          "load HBase configuration (%s -> %s).", propertyName, value));
+          "load HBase configuration (%s -> %s).", propertyName, Utilities.maskIfPassword(propertyName, value)));
       }
 
       if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) {
@@ -166,7 +167,7 @@
         sparkConf.put(propertyName, value);
         LOG.info(String.format(
           "load RPC property from hive configuration (%s -> %s).",
-          propertyName, value));
+          propertyName, Utilities.maskIfPassword(propertyName, value)));
       }
     }
 
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java	(revision b7e8d0c9c40cd3541aa89228e20372e1fa863c3a)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java	(revision )
@@ -151,4 +151,14 @@
     Assert.assertEquals("(ds%3D1)000005", newTaskID);
   }
 
+  public void testMaskIfPassword() {
+    Assert.assertNull(Utilities.maskIfPassword("", null));
+    Assert.assertNull(Utilities.maskIfPassword(null, null));
+    Assert.assertEquals("test", Utilities.maskIfPassword(null, "test"));
+    Assert.assertEquals("test2", Utilities.maskIfPassword("any", "test2"));
+    Assert.assertEquals("###_MASKED_###", Utilities.maskIfPassword("password", "test3"));
+    Assert.assertEquals("###_MASKED_###", Utilities.maskIfPassword("a_passWord", "test4"));
+    Assert.assertEquals("###_MASKED_###", Utilities.maskIfPassword("password_a", "test5"));
+    Assert.assertEquals("###_MASKED_###", Utilities.maskIfPassword("a_PassWord_a", "test6"));
+  }
 }
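For reviewers who want to sanity-check the new masking behavior outside of a Hive build, below is a minimal, self-contained sketch of the logic this patch adds as Utilities.maskIfPassword(). The MaskIfPasswordDemo class, its main() driver, and the sample keys are illustrative assumptions only and are not part of the patch. The match is a case-insensitive substring check on "password", so keys such as javax.jdo.option.ConnectionPassword or spark.ssl.keyPassword are masked, while ordinary properties pass through unchanged.

// MaskIfPasswordDemo.java -- illustrative sketch only, not part of this patch.
public class MaskIfPasswordDemo {

  // Same constants as the ones introduced in Utilities by this patch.
  private static final String KEY_TO_MASK_WITH = "password";
  private static final String MASKED_VALUE = "###_MASKED_###";

  // Mirrors Utilities.maskIfPassword(): mask the value when the key looks like a password key.
  public static String maskIfPassword(String key, String value) {
    if (key != null && value != null && key.toLowerCase().contains(KEY_TO_MASK_WITH)) {
      return MASKED_VALUE;
    }
    return value;
  }

  public static void main(String[] args) {
    // Password-like keys are masked (case-insensitive substring match)...
    System.out.println(maskIfPassword("javax.jdo.option.ConnectionPassword", "secret")); // ###_MASKED_###
    System.out.println(maskIfPassword("MY_PassWord_ENV", "hunter2"));                    // ###_MASKED_###
    // ...while ordinary properties are logged as-is.
    System.out.println(maskIfPassword("spark.executor.memory", "4g"));                   // 4g
  }
}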