diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index fdd8ecc..0c78ba7 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -516,6 +516,7 @@ minillaplocal.query.files=\
   explainanalyze_2.q,\
   explainuser_1.q,\
   explainuser_4.q,\
+  external_jdbc_auth.q,\
   external_jdbc_table.q,\
   fullouter_mapjoin_1_optimized.q,\
   groupby2.q,\
diff --git a/jdbc-handler/pom.xml b/jdbc-handler/pom.xml
index f90892f..ad421a1 100644
--- a/jdbc-handler/pom.xml
+++ b/jdbc-handler/pom.xml
@@ -113,6 +113,20 @@
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index caa823f..3e79352 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -61,10 +61,7 @@
     try {
       LOGGER.debug("Creating {} input splits", numSplits);
-
-      if (dbAccessor == null) {
-        dbAccessor = DatabaseAccessorFactory.getAccessor(job);
-      }
+      dbAccessor = DatabaseAccessorFactory.getAccessor(job);
 
       int numRecords = numSplits <=1 ? Integer.MAX_VALUE : dbAccessor.getTotalNumberOfRecords(job);
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
index 55fc0ea..f4bea20 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
@@ -15,11 +15,12 @@
 package org.apache.hive.storage.jdbc.conf;
 
 import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.storage.jdbc.conf.DatabaseType;
 
 import org.apache.hadoop.conf.Configuration;
@@ -41,6 +42,8 @@
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageConfigManager.class);
   public static final String CONFIG_PREFIX = "hive.sql";
   public static final String CONFIG_PWD = CONFIG_PREFIX + ".dbcp.password";
+  public static final String CONFIG_PWD_KEYSTORE = CONFIG_PREFIX + ".dbcp.password.keystore";
+  public static final String CONFIG_PWD_KEY = CONFIG_PREFIX + ".dbcp.password.key";
   public static final String CONFIG_USERNAME = CONFIG_PREFIX + ".dbcp.username";
   private static final EnumSet<JdbcStorageConfig> DEFAULT_REQUIRED_PROPERTIES =
     EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
@@ -59,7 +62,9 @@ public static void copyConfigurationToJob(Properties props, Map<String, String>
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
     for (Entry<Object, Object> entry : props.entrySet()) {
-      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD)) {
+      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEYSTORE) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEY)) {
         jobProps.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
       }
     }
@@ -69,9 +74,14 @@ public static void copySecretsToJob(Properties props, Map<String, String> jobSecrets)
       throws HiveException, IOException {
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
-    String secret = props.getProperty(CONFIG_PWD);
-    if (secret != null) {
-      jobSecrets.put(CONFIG_PWD, secret);
+    String passwd = props.getProperty(CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = props.getProperty(CONFIG_PWD_KEYSTORE);
+      String key = props.getProperty(CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+    if (passwd != null) {
+      jobSecrets.put(CONFIG_PWD, passwd);
     }
   }
@@ -88,7 +98,6 @@ public static Configuration convertPropertiesToConfiguration(Properties props)
     return conf;
   }
-
   private static void checkRequiredPropertiesAreDefined(Properties props) {
     DatabaseType dbType = null;
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index b2ad9a6..8935319 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -16,10 +16,8 @@
 import org.apache.commons.dbcp.BasicDataSourceFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,7 +49,6 @@
   protected static final int DEFAULT_FETCH_SIZE = 1000;
   protected static final Logger LOGGER = LoggerFactory.getLogger(GenericJdbcDatabaseAccessor.class);
   protected DataSource dbcpDataSource = null;
-  protected static final Text DBCP_PWD = new Text(DBCP_CONFIG_PREFIX + ".password");
 
   public GenericJdbcDatabaseAccessor() {
@@ -289,6 +286,9 @@ protected void initializeDatabaseConnection(Configuration conf) throws Exception
     }
   }
 
+  private String getFromProperties(Properties dbProperties, String key) {
+    return dbProperties.getProperty(key.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""));
+  }
 
   protected Properties getConnectionPoolProperties(Configuration conf) throws Exception {
     // Create the default properties object
@@ -303,10 +303,15 @@ protected Properties getConnectionPoolProperties(Configuration conf) throws Exce
     }
 
     // handle password
-    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
-    if (credentials.getSecretKey(DBCP_PWD) != null) {
-      LOGGER.info("found token in credentials");
-      dbProperties.put(DBCP_PWD,new String(credentials.getSecretKey(DBCP_PWD)));
+    String passwd = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEYSTORE);
+      String key = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+
+    if (passwd != null) {
+      dbProperties.put(JdbcStorageConfigManager.CONFIG_PWD.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""), passwd);
     }
 
     // essential properties that shouldn't be overridden by users
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
index e904774..b146633 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
@@ -18,11 +18,15 @@
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
+import org.apache.hive.storage.jdbc.dao.DatabaseAccessorFactory;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import org.mockito.BDDMockito;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.io.IOException;
@@ -32,7 +36,8 @@
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.when;
 
-@RunWith(MockitoJUnitRunner.class)
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(DatabaseAccessorFactory.class)
 public class TestJdbcInputFormat {
 
   @Mock
@@ -41,9 +46,10 @@
   @Test
   public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
@@ -58,9 +64,10 @@ public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException,
   @Test
   public void testSplitLogic_withSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
diff --git a/pom.xml b/pom.xml
index 5008923..75ebe9b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -188,6 +188,7 @@
     <mockito-all.version>1.10.19</mockito-all.version>
+    <powermock.version>1.7.4</powermock.version>
    <mina.version>2.0.0-M5</mina.version>
    <netty.version>4.1.17.Final</netty.version>
    <netty3.version>3.10.5.Final</netty3.version>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 74fb1ba..76a30eb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -202,7 +202,9 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hive.common.util.ACLConfigurationParser;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -2269,19 +2271,6 @@ public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) thro
         job.set(entry.getKey(), entry.getValue());
       }
     }
-
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-              .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-        }
-      }
-    } catch (IOException e) {
-      throw new HiveException(e);
-    }
   }
 
   /**
@@ -2307,18 +2296,24 @@ public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) throws
         job.set(entry.getKey(), entry.getValue());
       }
     }
+  }
 
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-              .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+  /**
+   * Copy job credentials to table properties
+   * @param tbl
+   */
+  public static void copyJobSecretToTableProperties(TableDesc tbl) throws IOException {
+    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
+    for (Text key : credentials.getAllSecretKeys()) {
+      String keyString = key.toString();
+      if (keyString.startsWith(TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT)) {
+        String[] comps = keyString.split(TableDesc.SECRET_DELIMIT);
+        String tblName = comps[1];
+        String keyName = comps[2];
+        if (tbl.getTableName().equalsIgnoreCase(tblName)) {
+          tbl.getProperties().put(keyName, new String(credentials.getSecretKey(key)));
         }
       }
-    } catch (IOException e) {
-      throw new HiveException(e);
     }
   }
@@ -4500,4 +4495,17 @@ public static int getBucketingVersion(final String versionStr) {
     }
     return bucketingVersion;
   }
+
+  public static String getPasswdFromKeystore(String keystore, String key) throws IOException {
+    String passwd = null;
+    if (keystore != null && key != null) {
+      Configuration conf = new Configuration();
+      conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
+      char[] pwdCharArray = conf.getPassword(key);
+      if (pwdCharArray != null) {
+        passwd = new String(pwdCharArray);
+      }
+    }
+    return passwd;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 99b33a3..91868a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -24,6 +24,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -93,6 +95,10 @@ public void configure(JobConf job) {
     // create map and fetch operators
     MapWork mrwork = Utilities.getMapWork(job);
+    for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+      TableDesc tableDesc = part.getTableDesc();
+      Utilities.copyJobSecretToTableProperties(tableDesc);
+    }
 
     CompilationOpContext runtimeCtx = new CompilationOpContext();
     if (mrwork.getVectorMode()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
index 7cd853f..88dd12c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
@@ -22,6 +22,8 @@
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -70,6 +72,10 @@
     execContext = new ExecMapperContext(jc);
     // create map and fetch operators
     MapWork mrwork = Utilities.getMapWork(job);
+    for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+      TableDesc tableDesc = part.getTableDesc();
+      Utilities.copyJobSecretToTableProperties(tableDesc);
+    }
 
     CompilationOpContext runtimeCtx = new CompilationOpContext();
     if (mrwork.getVectorMode()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
index ac43917..ea2e1fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
@@ -30,6 +30,8 @@
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hive.llap.LlapUtil;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -138,6 +140,11 @@ public Object call() {
       // TODO HIVE-14042. Cleanup may be required if exiting early.
       Utilities.setMapWork(jconf, mapWork);
 
+      for (PartitionDesc part : mapWork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
+
       String prefixes = jconf.get(DagUtils.TEZ_MERGE_WORK_FILE_PREFIXES);
       if (prefixes != null) {
         mergeWorkList = new ArrayList<MapWork>();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index a86656c..6bac285 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -453,6 +453,7 @@ private void addSplitsForGroup(List<Path> dirs, TableScanOperator tableScan, Job
     ValidWriteIdList validMmWriteIdList = getMmValidWriteIds(conf, table, validWriteIdList);
 
     try {
+      Utilities.copyJobSecretToTableProperties(table);
       Utilities.copyTablePropertiesToConf(table, conf);
       if (tableScan != null) {
         AcidUtils.setAcidOperationalProperties(conf, tableScan.getConf().isTranscationalTable(),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 82c3ca9..8254ed9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -130,6 +130,7 @@
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -2900,8 +2901,13 @@ private RelNode genTableLogicalPlan(String tableAlias, QB qb) throws SemanticExc
           final String url = tabMetaData.getProperty("hive.sql.jdbc.url");
           final String driver = tabMetaData.getProperty("hive.sql.jdbc.driver");
           final String user = tabMetaData.getProperty("hive.sql.dbcp.username");
-          final String pswd = tabMetaData.getProperty("hive.sql.dbcp.password");
+          String pswd = tabMetaData.getProperty("hive.sql.dbcp.password");
           //final String query = tabMetaData.getProperty("hive.sql.query");
+          if (pswd == null) {
+            String keystore = tabMetaData.getProperty("hive.sql.dbcp.password.keystore");
+            String key = tabMetaData.getProperty("hive.sql.dbcp.password.key");
+            pswd = Utilities.getPasswdFromKeystore(keystore, key);
+          }
           final String tableName = tabMetaData.getProperty("hive.sql.table");
 
           final DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 250a085..5229700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -72,6 +72,7 @@
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -981,6 +982,14 @@ public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
       if (storageHandler != null) {
         storageHandler.configureJobConf(tableDesc, jobConf);
       }
+      if (tableDesc.getJobSecrets() != null) {
+        for (Map.Entry<String, String> entry : tableDesc.getJobSecrets().entrySet()) {
+          String key = TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT +
+              tableDesc.getTableName() + TableDesc.SECRET_DELIMIT + entry.getKey();
+          jobConf.getCredentials().addSecretKey(new Text(key), entry.getValue().getBytes());
+        }
+        tableDesc.getJobSecrets().clear();
+      }
     } catch (HiveException e) {
       throw new RuntimeException(e);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index b73faa5..7993779 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -53,6 +53,8 @@
   private java.util.Properties properties;
   private Map<String, String> jobProperties;
   private Map<String, String> jobSecrets;
+  public static final String SECRET_PREFIX = "TABLE_SECRET";
+  public static final String SECRET_DELIMIT = "#";
 
   public TableDesc() {
   }
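For reviewers, a minimal usage sketch of the credential flow this patch enables (not part of the patch). The hive.sql.dbcp.password.keystore, hive.sql.dbcp.password.key, hive.sql.jdbc.url, hive.sql.jdbc.driver, hive.sql.dbcp.username, and hive.sql.table property names come from the patch; the storage handler class, table name, credential alias, provider path, and "hive.sql.database.type" spelling of the required JdbcStorageConfig.DATABASE_TYPE property are assumptions for illustration:

  # Store the database password in a Hadoop credential provider (prompts for the value).
  hadoop credential create example_db_password -provider jceks://hdfs/user/hive/example.jceks

  -- Point the table at the keystore instead of embedding hive.sql.dbcp.password in clear text.
  CREATE EXTERNAL TABLE example_jdbc_table (id INT, name STRING)
  STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
  TBLPROPERTIES (
    "hive.sql.database.type" = "MYSQL",
    "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
    "hive.sql.jdbc.url" = "jdbc:mysql://dbhost/exampledb",
    "hive.sql.dbcp.username" = "hiveuser",
    "hive.sql.dbcp.password.keystore" = "jceks://hdfs/user/hive/example.jceks",
    "hive.sql.dbcp.password.key" = "example_db_password",
    "hive.sql.table" = "EMPLOYEE"
  );

With table properties laid out this way, JdbcStorageConfigManager and GenericJdbcDatabaseAccessor resolve the password through Utilities.getPasswdFromKeystore (Configuration.getPassword against the configured provider path) rather than through UGI credentials; only the resolved hive.sql.dbcp.password value is shipped as a job secret, while the keystore path, key name, and clear-text password are all excluded from the copied job properties.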