commit b4f39759c7fea680b62549f521eb98da91fdb920
Author: Vihang Karajgaonkar
Date:   Fri Mar 24 17:22:20 2017 -0700

    HIVE-16297 : Strip sensitive configurations before dumping

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
index 9ba08e532613311853fb1122e756d63f6991b96c..dc02803706164705335404df7d646add7c647fa7 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -94,11 +94,22 @@ public static StringBuilder dumpConfig(HiveConf conf) {
   public static void stripConfigurations(Configuration conf, Set hiddenSet) {
     for (String name : hiddenSet) {
       if (conf.get(name) != null) {
-        conf.set(name, "");
+        conf.set(name, StringUtils.EMPTY);
       }
     }
   }
 
+  /**
+   * Searches the given configuration object and replaces all the configuration values for keys
+   * defined hive.conf.hidden.list by empty String
+   *
+   * @param conf - Configuration object which needs to be modified to remove sensitive keys
+   */
+  public static void stripConfigurations(Configuration conf) {
+    Set hiddenSet = getHiddenSet(conf);
+    stripConfigurations(conf, hiddenSet);
+  }
+
   public static void dumpConfig(Configuration originalConf, StringBuilder sb) {
     Set hiddenSet = getHiddenSet(originalConf);
     sb.append("Values omitted for security reason if present: ").append(hiddenSet).append("\n");
diff --git a/common/src/java/org/apache/hive/http/ConfServlet.java b/common/src/java/org/apache/hive/http/ConfServlet.java
index 253df4f2d62e5f2397f76a50adf2fb6980873866..856a5d2dd8146537e88c7b7bd8b6a0d00133c6ed 100644
--- a/common/src/java/org/apache/hive/http/ConfServlet.java
+++ b/common/src/java/org/apache/hive/http/ConfServlet.java
@@ -26,6 +26,7 @@
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 
 /**
  * A servlet to print out the running configuration data.
@@ -81,11 +82,14 @@ public void doGet(HttpServletRequest request, HttpServletResponse response)
    * Guts of the servlet - extracted for easy testing.
    */
   static void writeResponse(Configuration conf, Writer out, String format)
-    throws IOException, BadFormatException {
+      throws IOException, BadFormatException {
+    //redact the sensitive information from the configuration values
+    Configuration hconf = new Configuration(conf);
+    HiveConfUtil.stripConfigurations(hconf);
     if (FORMAT_JSON.equals(format)) {
-      Configuration.dumpConfiguration(conf, out);
+      Configuration.dumpConfiguration(hconf, out);
     } else if (FORMAT_XML.equals(format)) {
-      conf.writeXml(out);
+      hconf.writeXml(out);
     } else {
       throw new BadFormatException("Bad format: " + format);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index a9d03d060adeaa5cad6bef48a63c048f23819d01..4d727bad3c535379a7695ab865f4bba04d53159b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -463,13 +464,10 @@ public String getCounterName(Counter counter) {
   }
 
   private void logOutputFormatError(Configuration hconf, HiveException ex) {
-    StringWriter errorWriter = new StringWriter();
+    StringBuilder errorWriter = new StringBuilder();
     errorWriter.append("Failed to create output format; configuration: ");
-    try {
-      Configuration.dumpConfiguration(hconf, errorWriter);
-    } catch (IOException ex2) {
-      errorWriter.append("{ failed to dump configuration: " + ex2.getMessage() + " }");
-    }
+    // redact sensitive information before logging
+    HiveConfUtil.dumpConfig(hconf, errorWriter);
     Properties tdp = null;
     if (this.conf.getTableInfo() != null
         && (tdp = this.conf.getTableInfo().getProperties()) != null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index 4c698994e7e970811f68c4123a2eacd1ce158a10..4ca8f938a41368584eae691100aae74ec2e36d17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -356,12 +356,9 @@ public Serializable call(JobContext jc) throws Exception {
   private void logConfigurations(JobConf localJobConf) {
     if (LOG.isInfoEnabled()) {
       LOG.info("Logging job configuration: ");
-      StringWriter outWriter = new StringWriter();
-      try {
-        Configuration.dumpConfiguration(localJobConf, outWriter);
-      } catch (IOException e) {
-        LOG.warn("Error logging job configuration", e);
-      }
+      StringBuilder outWriter = new StringBuilder();
+      // redact sensitive information before logging
+      HiveConfUtil.dumpConfig(localJobConf, outWriter);
       LOG.info(outWriter.toString());
     }
   }