diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 1df5c74..de7aea6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -43,6 +43,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -98,6 +99,9 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -2275,4 +2279,27 @@ public DDLDescWithWriteId getAcidDdlDesc() {
   public WriteEntity getAcidAnalyzeTable() {
     return null;
   }
+
+  public void addPropertyReadEntry(Map<String, String> tblProps, HashSet<ReadEntity> inputs) throws SemanticException {
+    if (tblProps.containsKey(Constants.JDBC_KEYSTORE)) {
+      try {
+        String keystore = tblProps.get(Constants.JDBC_KEYSTORE);
+        Configuration conf = new Configuration();
+        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
+        boolean found = false;
+        for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf))
+          if (provider instanceof AbstractJavaKeyStoreProvider) {
+            Path path = ((AbstractJavaKeyStoreProvider) provider).getPath();
+            inputs.add(toReadEntity(path));
+            found = true;
+          }
+        if (!found) {
+          throw new SemanticException("Cannot recognize keystore " + keystore + ", only JavaKeyStoreProvider are " +
+              "supported");
+        }
+      } catch (IOException e) {
+        throw new SemanticException(e);
+      }
+    }
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 29f6ecf..bba7d6c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1771,6 +1771,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){
         alterTblDesc.setDropIfExists(true);
       }
     } else {
+      addPropertyReadEntry(mapProp, inputs);
       alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDPROPS, partSpec, expectView);
     }
     alterTblDesc.setProps(mapProp);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 31bc38e..b760049 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -13325,6 +13325,7 @@ ASTNode analyzeCreateTable(
         break;
       case HiveParser.TOK_TABLEPROPERTIES:
         tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
+        addPropertyReadEntry(tblProps, inputs);
         break;
       case HiveParser.TOK_TABLESERIALIZER:
         child = (ASTNode) child.getChild(0);
diff --git a/ql/src/test/queries/clientpositive/external_jdbc_auth.q b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
index acfb298..3e62a80 100644
--- a/ql/src/test/queries/clientpositive/external_jdbc_auth.q
+++ b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
@@ -92,3 +92,5 @@ CREATE TABLE hive_table
 INSERT INTO hive_table VALUES(20);
 (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL
 (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey);
+
+ALTER TABLE ext_auth1 SET TBLPROPERTIES ("hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks");
diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
index badc8b9..2dd5ea7 100644
--- a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
+++ b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
@@ -117,6 +117,7 @@ TBLPROPERTIES (
 "hive.sql.dbcp.maxActive" = "1"
 )
 PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default@ext_auth1
 POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth1
@@ -138,6 +139,7 @@ TBLPROPERTIES (
 "hive.sql.dbcp.maxActive" = "1"
 )
 POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ext_auth1
 PREHOOK: query: CREATE EXTERNAL TABLE ext_auth2
@@ -159,6 +161,7 @@ TBLPROPERTIES (
 "hive.sql.dbcp.maxActive" = "1"
 )
 PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default@ext_auth2
 POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth2
@@ -180,6 +183,7 @@ TBLPROPERTIES (
 "hive.sql.dbcp.maxActive" = "1"
 )
 POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ext_auth2
 PREHOOK: query: CREATE TABLE hive_table
@@ -219,3 +223,13 @@ POSTHOOK: Input: default@hive_table
 #### A masked pattern was here ####
 20	20	20.0	20.0	20
 20	20	20.0	20.0	20
+#### A masked pattern was here ####
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@ext_auth1
+#### A masked pattern was here ####
+PREHOOK: Output: default@ext_auth1
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@ext_auth1
+#### A masked pattern was here ####
+POSTHOOK: Output: default@ext_auth1
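For context, here is a minimal standalone sketch (not part of the patch) of the keystore resolution that addPropertyReadEntry performs: it feeds the "hive.sql.dbcp.password.keystore" value into Hadoop's CredentialProviderFactory and, for Java keystore providers, takes the backing file path — the same path the analyzer registers as a ReadEntity so authorization can check read access on the keystore. The class name and keystore URI below are hypothetical.

```java
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class KeystorePathProbe {
  public static void main(String[] args) throws IOException {
    // Hypothetical keystore URI; in the .q test it comes from TBLPROPERTIES.
    String keystore = "jceks://file/tmp/test.jceks";

    Configuration conf = new Configuration();
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);

    // Mirrors the loop in addPropertyReadEntry(): only Java keystore
    // providers expose a filesystem path that can be added as a read entity.
    List<CredentialProvider> providers = CredentialProviderFactory.getProviders(conf);
    for (CredentialProvider provider : providers) {
      if (provider instanceof AbstractJavaKeyStoreProvider) {
        Path path = ((AbstractJavaKeyStoreProvider) provider).getPath();
        System.out.println("keystore backing file: " + path);
      }
    }
  }
}
```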