commit a8018546cedd3e3a563673e106dd1b47d6a65933
Author: Daniel Dai
Date:   Sun Sep 30 00:35:04 2018 -0700

    HIVE-20651: JdbcStorageHandler password should be encrypted

diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index fdd8ecc..0c78ba7 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -516,6 +516,7 @@ minillaplocal.query.files=\
   explainanalyze_2.q,\
   explainuser_1.q,\
   explainuser_4.q,\
+  external_jdbc_auth.q,\
   external_jdbc_table.q,\
   fullouter_mapjoin_1_optimized.q,\
   groupby2.q,\
diff --git a/jdbc-handler/pom.xml b/jdbc-handler/pom.xml
index f90892f..ad421a1 100644
--- a/jdbc-handler/pom.xml
+++ b/jdbc-handler/pom.xml
@@ -113,6 +113,20 @@
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index caa823f..3e79352 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -61,10 +61,7 @@
     try {
       LOGGER.debug("Creating {} input splits", numSplits);
-
-      if (dbAccessor == null) {
-        dbAccessor = DatabaseAccessorFactory.getAccessor(job);
-      }
+      dbAccessor = DatabaseAccessorFactory.getAccessor(job);
 
       int numRecords = numSplits <=1 ? Integer.MAX_VALUE : dbAccessor.getTotalNumberOfRecords(job);
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
index 55fc0ea..cd9d4e2 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
@@ -15,6 +15,8 @@
 package org.apache.hive.storage.jdbc.conf;
 
 import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -41,6 +43,8 @@
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageConfigManager.class);
   public static final String CONFIG_PREFIX = "hive.sql";
   public static final String CONFIG_PWD = CONFIG_PREFIX + ".dbcp.password";
+  public static final String CONFIG_PWD_KEYSTORE = CONFIG_PREFIX + ".dbcp.password.keystore";
+  public static final String CONFIG_PWD_KEY = CONFIG_PREFIX + ".dbcp.password.key";
   public static final String CONFIG_USERNAME = CONFIG_PREFIX + ".dbcp.username";
   private static final EnumSet<JdbcStorageConfig> DEFAULT_REQUIRED_PROPERTIES =
       EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
@@ -59,7 +63,9 @@ public static void copyConfigurationToJob(Properties props, Map<String, String>
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
     for (Entry<Object, Object> entry : props.entrySet()) {
-      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD)) {
+      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEYSTORE) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEY)) {
         jobProps.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
       }
     }
@@ -69,9 +75,14 @@ public static void copySecretsToJob(Properties props, Map<String, String> jobSec
       throws HiveException, IOException {
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
-    String secret = props.getProperty(CONFIG_PWD);
-    if (secret != null) {
-      jobSecrets.put(CONFIG_PWD, secret);
+    String passwd = props.getProperty(CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = props.getProperty(CONFIG_PWD_KEYSTORE);
+      String key = props.getProperty(CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+    if (passwd != null) {
+      jobSecrets.put(CONFIG_PWD, passwd);
     }
   }
 
@@ -88,7 +99,6 @@ public static Configuration convertPropertiesToConfiguration(Properties props)
     return conf;
   }
 
-
   private static void checkRequiredPropertiesAreDefined(Properties props) {
     DatabaseType dbType = null;
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index b2ad9a6..a2646f5 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -16,11 +16,13 @@
 import org.apache.commons.dbcp.BasicDataSourceFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,7 +53,6 @@
   protected static final int DEFAULT_FETCH_SIZE = 1000;
   protected static final Logger LOGGER = LoggerFactory.getLogger(GenericJdbcDatabaseAccessor.class);
   protected DataSource dbcpDataSource = null;
-  protected static final Text DBCP_PWD = new Text(DBCP_CONFIG_PREFIX + ".password");
 
   public GenericJdbcDatabaseAccessor() {
@@ -289,6 +290,9 @@ protected void initializeDatabaseConnection(Configuration conf) throws Exception
     }
   }
 
+  private String getFromProperties(Properties dbProperties, String key) {
+    return dbProperties.getProperty(key.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""));
+  }
 
   protected Properties getConnectionPoolProperties(Configuration conf) throws Exception {
     // Create the default properties object
@@ -303,10 +307,15 @@ protected Properties getConnectionPoolProperties(Configuration conf) throws Exce
     }
 
     // handle password
-    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
-    if (credentials.getSecretKey(DBCP_PWD) != null) {
-      LOGGER.info("found token in credentials");
-      dbProperties.put(DBCP_PWD,new String(credentials.getSecretKey(DBCP_PWD)));
+    String passwd = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEYSTORE);
+      String key = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+
+    if (passwd != null) {
+      dbProperties.put(JdbcStorageConfigManager.CONFIG_PWD.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""), passwd);
     }
 
     // essential properties that shouldn't be overridden by users
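Note on the lookup above: both JdbcStorageConfigManager.copySecretsToJob and GenericJdbcDatabaseAccessor now defer to Utilities.getPasswdFromKeystore (added further down in this commit), which points Hadoop's credential-provider framework at the keystore URI and fetches the password by alias. A minimal standalone sketch of that mechanism, not part of the patch; the keystore path and alias are example values, not names fixed by the commit:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class KeystoreLookupSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Point the credential-provider framework at a JCEKS keystore.
    // "jceks://file/tmp/test.jceks" and the alias are example values.
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
        "jceks://file/tmp/test.jceks");
    // getPassword consults the configured providers before falling back
    // to a plain config value; this is what getPasswdFromKeystore relies on.
    char[] pwd = conf.getPassword("test_derby_auth1.password");
    System.out.println(pwd == null ? "alias not found" : "password resolved");
  }
}

The JCEKS file itself can be provisioned ahead of time with the stock Hadoop CLI, e.g. hadoop credential create test_derby_auth1.password -provider jceks://file/tmp/test.jceks.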
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
index e904774..b146633 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
@@ -18,11 +18,15 @@
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
+import org.apache.hive.storage.jdbc.dao.DatabaseAccessorFactory;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import org.mockito.BDDMockito;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.io.IOException;
@@ -32,7 +36,8 @@
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.when;
 
-@RunWith(MockitoJUnitRunner.class)
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(DatabaseAccessorFactory.class)
 public class TestJdbcInputFormat {
 
   @Mock
@@ -41,9 +46,10 @@
 
   @Test
   public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
@@ -58,9 +64,10 @@ public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException,
 
   @Test
   public void testSplitLogic_withSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
diff --git a/pom.xml b/pom.xml
index 5008923..75ebe9b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -188,6 +188,7 @@
    <opencsv.version>2.3</opencsv.version>
    <orc.version>1.5.3</orc.version>
    <mockito-all.version>1.10.19</mockito-all.version>
+    <powermock.version>1.7.4</powermock.version>
    <mina.version>2.0.0-M5</mina.version>
    <netty.version>4.1.17.Final</netty.version>
    <netty3.version>3.10.5.Final</netty3.version>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 74fb1ba..156c0ff 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -202,7 +202,9 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hive.common.util.ACLConfigurationParser;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -2269,19 +2271,6 @@ public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) thro
         job.set(entry.getKey(), entry.getValue());
       }
     }
-
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-              .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-        }
-      }
-    } catch (IOException e) {
-      throw new HiveException(e);
-    }
   }
 
   /**
@@ -2307,18 +2296,25 @@ public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) throws
         job.set(entry.getKey(), entry.getValue());
       }
     }
+  }
 
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-              .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+  /**
+   * Copy job credentials to table properties
+   * @param tbl
+   */
+  public static void copyJobSecretToTableProperties(TableDesc tbl) throws IOException {
+    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
+    for (Text key : credentials.getAllSecretKeys()) {
+      String keyString = key.toString();
+      if (keyString.startsWith(TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT)) {
+        String[] comps = keyString.split(TableDesc.SECRET_DELIMIT);
+        String tblName = comps[1];
+        String keyName = comps[2];
+        if (tbl.getTableName().equals(tblName)) {
+          tbl.getProperties().put(keyName, new String(credentials.getSecretKey(key)));
         }
       }
-    } catch (IOException e) {
-      throw new HiveException(e);
     }
   }
 
@@ -4500,4 +4496,17 @@ public static int getBucketingVersion(final String versionStr) {
     }
     return bucketingVersion;
   }
+
+  public static String getPasswdFromKeystore(String keystore, String key) throws IOException {
+    String passwd = null;
+    if (keystore != null && key != null) {
+      Configuration conf = new Configuration();
+      conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
+      char[] pwdCharArray = conf.getPassword(key);
+      if (pwdCharArray != null) {
+        passwd = new String(pwdCharArray);
+      }
+    }
+    return passwd;
+  }
 }
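The method above is one half of a round trip: at compile time, PlanUtils.configureJobConf (later in this diff) writes each TableDesc job secret into the JobConf's Credentials under a key of the form TABLE_SECRET#<table>#<property>, and at task start copyJobSecretToTableProperties splits that key back apart and re-attaches the secret to the matching table's properties. A self-contained sketch of that round trip using Hadoop's Credentials API, which the patch itself relies on; the table name and property below are example values:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class SecretRoundTripSketch {
  public static void main(String[] args) {
    Credentials creds = new Credentials();

    // Compile side: scope the secret to one table, mirroring
    // PlanUtils.configureJobConf. "default.ext_auth1" is an example name.
    String key = "TABLE_SECRET" + "#" + "default.ext_auth1" + "#" + "hive.sql.dbcp.password";
    creds.addSecretKey(new Text(key), "passwd1".getBytes());

    // Task side: recover table-scoped secrets, mirroring
    // Utilities.copyJobSecretToTableProperties.
    for (Text k : creds.getAllSecretKeys()) {
      String[] comps = k.toString().split("#");
      if (comps[0].equals("TABLE_SECRET") && comps[1].equals("default.ext_auth1")) {
        System.out.println(comps[2] + " -> " + new String(creds.getSecretKey(k)));
      }
    }
  }
}

Scoping the credential key by table name is what lets a single job carry distinct passwords for several JDBC tables, as the new q test exercises below.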
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 99b33a3..17370d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -24,6 +24,10 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -93,6 +97,10 @@ public void configure(JobConf job) {
       // create map and fetch operators
       MapWork mrwork = Utilities.getMapWork(job);
+      for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
 
       CompilationOpContext runtimeCtx = new CompilationOpContext();
       if (mrwork.getVectorMode()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
index 7cd853f..88dd12c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
@@ -22,6 +22,8 @@
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -70,6 +72,10 @@
       execContext = new ExecMapperContext(jc);
       // create map and fetch operators
       MapWork mrwork = Utilities.getMapWork(job);
+      for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
 
       CompilationOpContext runtimeCtx = new CompilationOpContext();
       if (mrwork.getVectorMode()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
index ac43917..ea2e1fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
@@ -30,6 +30,8 @@
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hive.llap.LlapUtil;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -138,6 +140,11 @@ public Object call() {
       // TODO HIVE-14042. Cleanup may be required if exiting early.
       Utilities.setMapWork(jconf, mapWork);
 
+      for (PartitionDesc part : mapWork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
+
       String prefixes = jconf.get(DagUtils.TEZ_MERGE_WORK_FILE_PREFIXES);
       if (prefixes != null) {
         mergeWorkList = new ArrayList();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index a86656c..6bac285 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -453,6 +453,7 @@ private void addSplitsForGroup(List<Path> dirs, TableScanOperator tableScan, Job
     ValidWriteIdList validMmWriteIdList = getMmValidWriteIds(conf, table, validWriteIdList);
 
     try {
+      Utilities.copyJobSecretToTableProperties(table);
       Utilities.copyTablePropertiesToConf(table, conf);
       if (tableScan != null) {
         AcidUtils.setAcidOperationalProperties(conf, tableScan.getConf().isTranscationalTable(),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 82c3ca9..8254ed9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -130,6 +130,7 @@
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -2900,8 +2901,13 @@ private RelNode genTableLogicalPlan(String tableAlias, QB qb) throws SemanticExc
         final String url = tabMetaData.getProperty("hive.sql.jdbc.url");
         final String driver = tabMetaData.getProperty("hive.sql.jdbc.driver");
         final String user = tabMetaData.getProperty("hive.sql.dbcp.username");
-        final String pswd = tabMetaData.getProperty("hive.sql.dbcp.password");
+        String pswd = tabMetaData.getProperty("hive.sql.dbcp.password");
         //final String query = tabMetaData.getProperty("hive.sql.query");
+        if (pswd == null) {
+          String keystore = tabMetaData.getProperty("hive.sql.dbcp.password.keystore");
+          String key = tabMetaData.getProperty("hive.sql.dbcp.password.key");
+          pswd = Utilities.getPasswdFromKeystore(keystore, key);
+        }
         final String tableName = tabMetaData.getProperty("hive.sql.table");
 
         final DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 250a085..5229700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -72,6 +72,7 @@
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -981,6 +982,14 @@ public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
       if (storageHandler != null) {
         storageHandler.configureJobConf(tableDesc, jobConf);
       }
+      if (tableDesc.getJobSecrets() != null) {
+        for (Map.Entry<String, String> entry : tableDesc.getJobSecrets().entrySet()) {
+          String key = TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT +
+              tableDesc.getTableName() + TableDesc.SECRET_DELIMIT + entry.getKey();
+          jobConf.getCredentials().addSecretKey(new Text(key), entry.getValue().getBytes());
+        }
+        tableDesc.getJobSecrets().clear();
+      }
     } catch (HiveException e) {
       throw new RuntimeException(e);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index b73faa5..7993779 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -53,6 +53,8 @@
   private java.util.Properties properties;
   private Map<String, String> jobProperties;
   private Map<String, String> jobSecrets;
+  public static final String SECRET_PREFIX = "TABLE_SECRET";
+  public static final String SECRET_DELIMIT = "#";
 
   public TableDesc() {
   }
diff --git a/ql/src/test/queries/clientpositive/external_jdbc_auth.q b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
new file mode 100644
index 0000000..acfb298
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
@@ -0,0 +1,94 @@
+--! qt:dataset:src
+
+CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
+
+FROM src
+
+SELECT
+
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;create=true','user1','passwd1',
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1;
+
+FROM src
+
+SELECT
+
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;create=true','user2','passwd2',
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1;
+
+
+CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+  "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;collation=TERRITORY_BASED:PRIMARY",
+  "hive.sql.dbcp.username" = "user1",
+  "hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks",
+  "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+  "hive.sql.dbcp.maxActive" = "1"
+);
+
+
+CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+  "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;collation=TERRITORY_BASED:PRIMARY",
+  "hive.sql.dbcp.username" = "user2",
+  "hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks",
+  "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+  "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE TABLE hive_table
+(
+ ikey int
+);
+
+INSERT INTO hive_table VALUES(20);
+
+(SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey);
diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
new file mode 100644
index 0000000..badc8b9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
@@ -0,0 +1,221 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: dboutput
+POSTHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: dboutput
+PREHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0	0	0	0
+PREHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0	0	0	0
+PREHOOK: query: CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.username" = "user1",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+  "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ext_auth1
+POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.username" = "user1",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+  "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ext_auth1
+PREHOOK: query: CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.username" = "user2",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+  "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ext_auth2
+POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+  "hive.sql.database.type" = "DERBY",
+  "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.username" = "user2",
+#### A masked pattern was here ####
+  "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+  "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+  "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ext_auth2
+PREHOOK: query: CREATE TABLE hive_table
+(
+ ikey int
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hive_table
+POSTHOOK: query: CREATE TABLE hive_table
+(
+ ikey int
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hive_table
+PREHOOK: query: INSERT INTO hive_table VALUES(20)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hive_table
+POSTHOOK: query: INSERT INTO hive_table VALUES(20)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hive_table
+POSTHOOK: Lineage: hive_table.ikey SCRIPT []
+PREHOOK: query: (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ext_auth1
+PREHOOK: Input: default@ext_auth2
+PREHOOK: Input: default@hive_table
+#### A masked pattern was here ####
+POSTHOOK: query: (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ext_auth1
+POSTHOOK: Input: default@ext_auth2
+POSTHOOK: Input: default@hive_table
+#### A masked pattern was here ####
+20	20	20.0	20.0	20
+20	20	20.0	20.0	20