diff --git beeline/pom.xml beeline/pom.xml
index 6ec1d1a..750db5e 100644
--- beeline/pom.xml
+++ beeline/pom.xml
@@ -48,7 +48,6 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
<version>${project.version}</version>
- <scope>runtime</scope>
diff --git beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index de3ad4e..1078ca3 100644
--- beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -48,6 +48,7 @@
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.beeline.HiveSchemaHelper.NestedScriptParser;
public class HiveSchemaTool {
@@ -72,7 +73,12 @@ public HiveSchemaTool(String hiveHome, HiveConf hiveConf, String dbType)
this.dbType = dbType;
this.metaStoreSchemaInfo = new MetaStoreSchemaInfo(hiveHome, hiveConf, dbType);
userName = hiveConf.get(ConfVars.METASTORE_CONNECTION_USER_NAME.varname);
- passWord = hiveConf.get(HiveConf.ConfVars.METASTOREPWD.varname);
+ try {
+ passWord = ShimLoader.getHadoopShims().getPassword(hiveConf,
+ HiveConf.ConfVars.METASTOREPWD.varname);
+ } catch (IOException err) {
+ throw new HiveMetaException("Error getting metastore password", err);
+ }
}
public HiveConf getHiveConf() {
diff --git itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
new file mode 100644
index 0000000..f9b698e
--- /dev/null
+++ itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.util.List;
+
+import org.junit.*;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
+
+public class TestPasswordWithCredentialProvider {
+
+ public static boolean doesHadoopPasswordAPIExist() {
+ boolean foundMethod = false;
+ try {
+ Configuration.class.getMethod("getPassword", String.class);
+ foundMethod = true;
+ } catch (NoSuchMethodException err) { // getPassword() absent in this version of hadoop-2
+ }
+ return foundMethod;
+ }
+
+ private static final File tmpDir =
+ new File(System.getProperty("test.tmp.dir"), "creds");
+
+ private static Object invoke(Class objClass, Object obj, String methodName, Object ... args)
+ throws Exception {
+ Class[] argTypes = new Class[args.length];
+ for (int idx = 0; idx < args.length; ++idx) {
+ argTypes[idx] = args[idx].getClass();
+ }
+ Method method = objClass.getMethod(methodName, argTypes);
+ return method.invoke(obj, args);
+ }
+
+ @Test
+ public void testPassword() throws Exception {
+ if (!doesHadoopPasswordAPIExist()) {
+ System.out.println("Skipping Password API test"
+ + " because this version of hadoop-2 does not support the password API.");
+ return;
+ }
+
+ String credName = "my.password";
+ String credName2 = "my.password2";
+ String credName3 = "my.password3";
+ String hiveConfPassword = "conf value";
+ String credPassword = "cred value";
+ String confOnlyPassword = "abcdefg";
+ String credOnlyPassword = "12345";
+
+ // Set up conf
+ Configuration conf = new Configuration();
+ conf.set(credName, hiveConfPassword); // Will be superseded by credential provider
+ conf.set(credName2, confOnlyPassword); // Will not be superseded
+ assertEquals(hiveConfPassword, conf.get(credName));
+ assertEquals(confOnlyPassword, conf.get(credName2));
+ assertNull("credName3 should not exist in HiveConf", conf.get(credName3));
+
+ // Configure getPassword() to fall back to conf if the credential provider has no entry
+ conf.set("hadoop.security.credential.clear-text-fallback", "true");
+
+ // Set up CredentialProvider
+ conf.set("hadoop.security.credential.provider.path", "jceks://file/" + tmpDir + "/test.jks");
+
+ // CredentialProvider/CredentialProviderFactory may not exist, depending on the version of
+ // hadoop-2 used to build Hive. Use reflection for the following lines so the test
+ // compiles regardless of which version of hadoop-2 is on the classpath.
+ // Update credName entry in the credential provider.
+ //CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
+ //provider.createCredentialEntry(credName, credPassword.toCharArray());
+ //provider.createCredentialEntry(credName3, credOnlyPassword.toCharArray());
+ //provider.flush();
+
+ Class credentialProviderClass =
+ Class.forName("org.apache.hadoop.security.alias.CredentialProvider");
+ Class credentialProviderFactoryClass =
+ Class.forName("org.apache.hadoop.security.alias.CredentialProviderFactory");
+ Object provider =
+ ((List) invoke(credentialProviderFactoryClass, null, "getProviders", conf))
+ .get(0);
+ invoke(credentialProviderClass, provider, "createCredentialEntry", credName, credPassword.toCharArray());
+ invoke(credentialProviderClass, provider, "createCredentialEntry", credName3, credOnlyPassword.toCharArray());
+ invoke(credentialProviderClass, provider, "flush");
+
+ // If credential provider has entry for our credential, then it should be used
+ assertEquals("getPassword() should use match value in credential provider",
+ credPassword, ShimLoader.getHadoopShims().getPassword(conf, credName));
+ // If cred provider doesn't have entry, fall back to conf
+ assertEquals("getPassword() should match value from conf",
+ confOnlyPassword, ShimLoader.getHadoopShims().getPassword(conf, credName2));
+ // If cred provider has entry and conf does not, cred provider is used.
+ // This is our use case for not storing passwords in the clear in Hive conf files.
+ assertEquals("getPassword() should use credential provider if conf has no value",
+ credOnlyPassword, ShimLoader.getHadoopShims().getPassword(conf, credName3));
+ // If neither the cred provider nor conf has an entry, return null.
+ assertNull("null if neither cred provider nor conf has an entry",
+ ShimLoader.getHadoopShims().getPassword(conf, "nonexistentkey"));
+ }
+}
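
For reference, on a version of hadoop-2 that ships the credential provider API, the reflective calls in the test above correspond to the direct usage below. This is a minimal sketch, assuming org.apache.hadoop.security.alias.* is available at compile time (precisely what the test cannot assume); the class name, keystore path, and key name are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class CredentialProviderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Point the provider chain at a local Java keystore file (path is illustrative).
    conf.set("hadoop.security.credential.provider.path",
        "jceks://file/tmp/hive-creds/test.jks");
    // Store a password under its configuration key, then persist the keystore.
    CredentialProvider provider =
        CredentialProviderFactory.getProviders(conf).get(0);
    provider.createCredentialEntry("my.password", "cred value".toCharArray());
    provider.flush();
    // getPassword() consults the credential providers before falling back
    // to any clear-text value set directly in the configuration.
    char[] pw = conf.getPassword("my.password");
    System.out.println(pw == null ? "null" : new String(pw));
  }
}
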
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithConfig.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithConfig.java
new file mode 100644
index 0000000..4c3ae19
--- /dev/null
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithConfig.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.junit.*;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
+
+public class TestPasswordWithConfig {
+ @Test
+ public void testPassword() throws Exception {
+ String key1 = "key1";
+ String key2 = "key2";
+ String val1 = "value1";
+ Configuration conf = new Configuration();
+ conf.set(key1, val1);
+
+ assertEquals("key1 should exist in config", val1, ShimLoader.getHadoopShims().getPassword(conf, key1));
+ assertNull("key2 should not exist in config", ShimLoader.getHadoopShims().getPassword(conf, key2));
+ }
+}
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
index 3ab5827..30cf814 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.metastore.txn;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.ShimLoader;
import java.sql.Connection;
import java.sql.Driver;
@@ -201,7 +202,8 @@ private static Connection getConnection() throws Exception {
Properties prop = new Properties();
String driverUrl = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORECONNECTURLKEY);
String user = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME);
- String passwd = HiveConf.getVar(conf, HiveConf.ConfVars.METASTOREPWD);
+ String passwd = ShimLoader.getHadoopShims().getPassword(conf,
+ HiveConf.ConfVars.METASTOREPWD.varname);
prop.put("user", user);
prop.put("password", passwd);
return driver.connect(driverUrl, prop);
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index e78cd75..063dee6 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -32,9 +32,12 @@
import org.apache.hadoop.hive.common.ValidTxnListImpl;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.util.StringUtils;
import javax.sql.DataSource;
+
+import java.io.IOException;
import java.sql.*;
import java.util.*;
@@ -1602,7 +1605,13 @@ private static synchronized void setupJdbcConnectionPool(HiveConf conf) throws S
String driverUrl = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORECONNECTURLKEY);
String user = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME);
- String passwd = HiveConf.getVar(conf, HiveConf.ConfVars.METASTOREPWD);
+ String passwd;
+ try {
+ passwd = ShimLoader.getHadoopShims().getPassword(conf,
+ HiveConf.ConfVars.METASTOREPWD.varname);
+ } catch (IOException err) {
+ throw new SQLException("Error getting metastore password", err);
+ }
String connectionPooler = HiveConf.getVar(conf,
HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE).toLowerCase();
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index b009a88..37b05fc 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -20,7 +20,9 @@
import java.net.InetSocketAddress;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
import org.apache.thrift.TProcessorFactory;
@@ -73,8 +75,10 @@ public void run() {
throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
" Not configured for SSL connection");
}
+ String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
+ HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum,
- keyStorePath, hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD));
+ keyStorePath, keyStorePassword);
}
TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
.processorFactory(processorFactory)
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
index 98d75b5..c5abaeb 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.apache.hive.service.auth.HiveAuthFactory;
@@ -83,7 +84,8 @@ public void run() {
if (useSsl) {
String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
- String keyStorePassword = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD);
+ String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
+ HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
if (keyStorePath.isEmpty()) {
throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
" Not configured for SSL connection");
diff --git shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 5d70e03..aec6796 100644
--- shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -889,4 +889,10 @@ public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
throws IOException, AccessControlException, Exception {
DefaultFileAccess.checkFileAccess(fs, stat, action);
}
+
+ @Override
+ public String getPassword(Configuration conf, String name) {
+ // No password API available; just retrieve the value from conf
+ return conf.get(name);
+ }
}
diff --git shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
index b85a69c..8bc871ac 100644
--- shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -507,4 +507,10 @@ public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOE
public void getMergedCredentials(JobConf jobConf) throws IOException {
throw new IOException("Merging of credentials not supported in this version of hadoop");
}
+
+ @Override
+ public String getPassword(Configuration conf, String name) {
+ // No password API available; just retrieve the value from conf
+ return conf.get(name);
+ }
}
diff --git shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 40757f5..5ae5299 100644
--- shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -735,6 +735,7 @@ public void getMergedCredentials(JobConf jobConf) throws IOException {
}
protected static final Method accessMethod;
+ protected static final Method getPasswordMethod;
static {
Method m = null;
@@ -744,6 +745,14 @@ public void getMergedCredentials(JobConf jobConf) throws IOException {
// This version of Hadoop does not support FileSystem.access().
}
accessMethod = m;
+
+ try {
+ m = Configuration.class.getMethod("getPassword", String.class);
+ } catch (NoSuchMethodException err) {
+ // This version of Hadoop does not support getPassword(); fall back to retrieving the password from conf.
+ m = null;
+ }
+ getPasswordMethod = m;
}
@Override
@@ -779,4 +788,22 @@ private static Exception wrapAccessException(Exception err) {
}
return err;
}
+
+ @Override
+ public String getPassword(Configuration conf, String name) throws IOException {
+ if (getPasswordMethod == null) {
+ // Just retrieve value from conf
+ return conf.get(name);
+ } else {
+ try {
+ char[] pw = (char[]) getPasswordMethod.invoke(conf, name);
+ if (pw == null) {
+ return null;
+ }
+ return new String(pw);
+ } catch (Exception err) {
+ throw new IOException(err.getMessage(), err);
+ }
+ }
+ }
}
diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 697d4b7..eed4f5b 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -685,4 +685,13 @@ public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus,
*/
public void checkFileAccess(FileSystem fs, FileStatus status, FsAction action)
throws IOException, AccessControlException, Exception;
+
+ /**
+ * Use the password API (if available) to fetch a credential/password.
+ * @param conf configuration to read the credential or property from
+ * @param name name of the credential entry or configuration property
+ * @return the resolved password, or null if no value is found
+ */
+ public String getPassword(Configuration conf, String name) throws IOException;
+
}
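
Taken together, the shims let callers resolve passwords with one call that behaves consistently across Hadoop versions. A minimal sketch of the intended call pattern follows; the class name is hypothetical and the printed strings are illustrative.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.shims.ShimLoader;

public class ShimPasswordSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new HiveConf();
    // The Hadoop20/20S shims return conf.get(name) directly; the Hadoop23 shim
    // invokes Configuration.getPassword() reflectively when it exists, so the
    // same call works whether or not the credential provider API is present.
    String passwd = ShimLoader.getHadoopShims().getPassword(conf,
        HiveConf.ConfVars.METASTOREPWD.varname);
    System.out.println(passwd == null ? "no password configured" : "password resolved");
  }
}
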