diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 2723dadd76418f23b6e6358d21304e3312ef1cbd..b2347bfce50f78fa18483c49580b32f1441f2dc4 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -40,11 +40,14 @@ import org.slf4j.LoggerFactory;
 
 import javax.security.auth.login.LoginException;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URL;
+import java.net.URLDecoder;
+import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -96,6 +99,63 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
     this.isSparkConfigUpdated = isSparkConfigUpdated;
   }
 
+  /**
+   * Encodes and decodes String config values on their way into and out of the
+   * underlying Configuration, so that characters the backing storage cannot
+   * represent (e.g. control characters in query strings) survive a round trip.
+   */
+  public interface EncoderDecoder<K, V> {
+    V encode(K key);
+    K decode(V value);
+  }
+
+  /** Pass-through codec: stores values unchanged. */
+  public static class DefaultEncoderDecoder implements EncoderDecoder<String, String> {
+    @Override
+    public String encode(String key) {
+      return key;
+    }
+
+    @Override
+    public String decode(String value) {
+      return value;
+    }
+  }
+
+  /** Codec that URL-encodes values as UTF-8; falls back to the raw value. */
+  public static class URLEncoderDecoder implements EncoderDecoder<String, String> {
+    private static final String UTF_8 = "UTF-8";
+
+    @Override
+    public String encode(String key) {
+      try {
+        return URLEncoder.encode(key, UTF_8);
+      } catch (UnsupportedEncodingException e) {
+        // UTF-8 is guaranteed by the JLS; degrade to the raw value anyway.
+        return key;
+      }
+    }
+
+    @Override
+    public String decode(String value) {
+      try {
+        return URLDecoder.decode(value, UTF_8);
+      } catch (UnsupportedEncodingException e) {
+        return value;
+      }
+    }
+  }
+
+  /** Shared, stateless codec instances. */
+  public static class EncoderDecoderFactory {
+    public static final DefaultEncoderDecoder DEFAULT_ENCODER_DECODER = new DefaultEncoderDecoder();
+    public static final URLEncoderDecoder URL_ENCODER_DECODER = new URLEncoderDecoder();
+  }
+
+  // Maps a ConfVars entry to the codec used to store it; vars absent from
+  // this map are stored verbatim.
+  public static final Map<ConfVars, EncoderDecoder<String, String>> confVarsEncoderDecoderMap
+      = new HashMap<ConfVars, EncoderDecoder<String, String>>();
 static {
 ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
 if (classLoader == null) {
@@ -116,6 +176,9 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
     Set<String> llapDaemonConfVarsSetLocal = new LinkedHashSet<>();
     populateLlapDaemonVarsSet(llapDaemonConfVarsSetLocal);
     llapDaemonVarsSet = Collections.unmodifiableSet(llapDaemonConfVarsSetLocal);
+    // HIVEQUERYSTRING can contain characters the XML-backed storage cannot
+    // hold (e.g. U+0001), so register the URL codec for it.
+    confVarsEncoderDecoderMap.put(ConfVars.HIVEQUERYSTRING, EncoderDecoderFactory.URL_ENCODER_DECODER);
   }
 
   @InterfaceAudience.Private
@@ -3280,10 +3343,8 @@ public void setBoolVar(ConfVars var, boolean val) {
 
   public static String getVar(Configuration conf, ConfVars var) {
     assert (var.valClass == String.class) : var.varname;
-    if (var.altName != null) {
-      return conf.get(var.varname, conf.get(var.altName, var.defaultStrVal));
-    }
-    return conf.get(var.varname, var.defaultStrVal);
+    return var.altName != null ? conf.get(var.varname, conf.get(var.altName, var.defaultStrVal))
+        : conf.get(var.varname, var.defaultStrVal);
   }
 
   public static String getTrimmedVar(Configuration conf, ConfVars var) {
@@ -3306,10 +3367,12 @@ public static String getTrimmedVar(Configuration conf, ConfVars var) {
   }
 
   public static String getVar(Configuration conf, ConfVars var, String defaultVal) {
-    if (var.altName != null) {
-      return conf.get(var.varname, conf.get(var.altName, defaultVal));
-    }
-    return conf.get(var.varname, defaultVal);
+    String ret = var.altName != null ? conf.get(var.varname, conf.get(var.altName, defaultVal))
+        : conf.get(var.varname, defaultVal);
+    // Only decode vars that registered a codec: most vars have none, and an
+    // unguarded map lookup would NPE on every other config read.
+    EncoderDecoder<String, String> codec = confVarsEncoderDecoderMap.get(var);
+    return codec == null ? ret : codec.decode(ret);
   }
 
   public String getLogIdVar(String defaultValue) {
@@ -3339,10 +3402,23 @@ public static ConfVars getMetaConf(String name) {
     return metaConfs.get(name);
   }
 
+  /**
+   * Returns the value of var decoded with the supplied codec; the inverse of
+   * {@link #setVar(ConfVars, String, EncoderDecoder)}.
+   */
+  public String getVar(ConfVars var, EncoderDecoder<String, String> encoderDecoder) {
+    return encoderDecoder.decode(getVar(var));
+  }
+
   public String getVar(ConfVars var) {
     return getVar(this, var);
   }
 
+  /** Stores val under var after encoding it with the supplied codec. */
+  public void setVar(ConfVars var, String val, EncoderDecoder<String, String> encoderDecoder) {
+    setVar(var, encoderDecoder.encode(val));
+  }
+
   public void setVar(ConfVars var, String val) {
     setVar(this, var, val);
   }
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 365d5005d2a1f240b12bb4e9f9a920b3db4d32ba..73a96b0b42cad010c8c9ae0ee34b5fd83f031547 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -25,6 +25,8 @@
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
 import java.util.concurrent.TimeUnit;
 
 public class TestHiveConf {
@@ -156,4 +158,15 @@ public void testSparkConfigUpdate(){
     conf.setSparkConfigUpdated(false);
     Assert.assertFalse(conf.getSparkConfigUpdated());
   }
+
+  /** Round-trip: setVar with URL codec stores encoded text; getVar decodes it. */
+  @Test
+  public void testEncodingDecoding() throws UnsupportedEncodingException {
+    HiveConf conf = new HiveConf();
+    String query = "select blah, '\u0001' from random_table";
+    conf.setVar(ConfVars.HIVEQUERYSTRING, query, HiveConf.EncoderDecoderFactory.URL_ENCODER_DECODER);
+    Assert.assertEquals(URLEncoder.encode(query, "UTF-8"), conf.get(ConfVars.HIVEQUERYSTRING.varname));
+    Assert.assertEquals(query,
+        conf.getVar(ConfVars.HIVEQUERYSTRING, HiveConf.EncoderDecoderFactory.URL_ENCODER_DECODER));
+  }
 }