commit 937813817a49ef363391662524298e3616358fa7 Author: Daniel Dai Date: Fri Oct 12 16:00:06 2018 -0700 HIVE-20731: keystore file in JdbcStorageHandler should be authorized diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 1df5c74..c9df668 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -98,6 +99,9 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.mapred.TextInputFormat; +import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider; +import org.apache.hadoop.security.alias.CredentialProvider; +import org.apache.hadoop.security.alias.CredentialProviderFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -2275,4 +2279,27 @@ public DDLDescWithWriteId getAcidDdlDesc() { public WriteEntity getAcidAnalyzeTable() { return null; } + + public void addPropertyReadEntry(Map<String, String> tblProps, Set<ReadEntity> inputs) throws SemanticException { + if (tblProps.containsKey(Constants.JDBC_KEYSTORE)) { + try { + String keystore = tblProps.get(Constants.JDBC_KEYSTORE); + Configuration conf = new Configuration(); + conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore); + boolean found = false; + for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf)) + if (provider instanceof AbstractJavaKeyStoreProvider) { + Path path = ((AbstractJavaKeyStoreProvider) provider).getPath(); + 
inputs.add(toReadEntity(path)); + found = true; + } + if (!found) { + throw new SemanticException("Cannot recognize keystore " + keystore + ", only JavaKeyStoreProvider is " + + "supported"); + } + } catch (IOException e) { + throw new SemanticException(e); + } + } + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 29f6ecf..bba7d6c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -1771,6 +1771,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ alterTblDesc.setDropIfExists(true); } } else { + addPropertyReadEntry(mapProp, inputs); alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDPROPS, partSpec, expectView); } alterTblDesc.setProps(mapProp); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 31bc38e..b760049 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -13325,6 +13325,7 @@ ASTNode analyzeCreateTable( break; case HiveParser.TOK_TABLEPROPERTIES: tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); + addPropertyReadEntry(tblProps, inputs); break; case HiveParser.TOK_TABLESERIALIZER: child = (ASTNode) child.getChild(0); diff --git a/ql/src/test/queries/clientnegative/authorization_jdbc_keystore.q b/ql/src/test/queries/clientnegative/authorization_jdbc_keystore.q new file mode 100644 index 0000000..e80cefb --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_jdbc_keystore.q @@ -0,0 +1,25 @@ +--! 
qt:dataset: + +set hive.test.authz.sstd.hs2.mode=true; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.fallback.FallbackHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +CREATE EXTERNAL TABLE ext_auth1 +( + ikey int, + bkey bigint, + fkey float, + dkey double +) +STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler' +TBLPROPERTIES ( + "hive.sql.database.type" = "DERBY", + "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver", + "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;collation=TERRITORY_BASED:PRIMARY", + "hive.sql.dbcp.username" = "user1", + "hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks", + "hive.sql.dbcp.password.key" = "test_derby_auth1.password", + "hive.sql.table" = "SIMPLE_DERBY_TABLE1", + "hive.sql.dbcp.maxActive" = "1" +); diff --git a/ql/src/test/queries/clientpositive/external_jdbc_auth.q b/ql/src/test/queries/clientpositive/external_jdbc_auth.q index acfb298..3e62a80 100644 --- a/ql/src/test/queries/clientpositive/external_jdbc_auth.q +++ b/ql/src/test/queries/clientpositive/external_jdbc_auth.q @@ -92,3 +92,5 @@ CREATE TABLE hive_table INSERT INTO hive_table VALUES(20); (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey); + +ALTER TABLE ext_auth1 SET TBLPROPERTIES ("hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks"); diff --git a/ql/src/test/results/clientnegative/authorization_jdbc_keystore.q.out b/ql/src/test/results/clientnegative/authorization_jdbc_keystore.q.out new file mode 100644 index 0000000..1e978b4 --- /dev/null +++ 
b/ql/src/test/results/clientnegative/authorization_jdbc_keystore.q.out @@ -0,0 +1 @@ +FAILED: RuntimeException org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.ClassNotFoundException: org.apache.hadoop.hive.ql.security.authorization.plugin.fallback.FallbackHiveAuthorizerFactory diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out index badc8b9..2dd5ea7 100644 --- a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out +++ b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out @@ -117,6 +117,7 @@ TBLPROPERTIES ( "hive.sql.dbcp.maxActive" = "1" ) PREHOOK: type: CREATETABLE +#### A masked pattern was here #### PREHOOK: Output: database:default PREHOOK: Output: default@ext_auth1 POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth1 @@ -138,6 +139,7 @@ TBLPROPERTIES ( "hive.sql.dbcp.maxActive" = "1" ) POSTHOOK: type: CREATETABLE +#### A masked pattern was here #### POSTHOOK: Output: database:default POSTHOOK: Output: default@ext_auth1 PREHOOK: query: CREATE EXTERNAL TABLE ext_auth2 @@ -159,6 +161,7 @@ TBLPROPERTIES ( "hive.sql.dbcp.maxActive" = "1" ) PREHOOK: type: CREATETABLE +#### A masked pattern was here #### PREHOOK: Output: database:default PREHOOK: Output: default@ext_auth2 POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth2 @@ -180,6 +183,7 @@ TBLPROPERTIES ( "hive.sql.dbcp.maxActive" = "1" ) POSTHOOK: type: CREATETABLE +#### A masked pattern was here #### POSTHOOK: Output: database:default POSTHOOK: Output: default@ext_auth2 PREHOOK: query: CREATE TABLE hive_table @@ -219,3 +223,13 @@ POSTHOOK: Input: default@hive_table #### A masked pattern was here #### 20 20 20.0 20.0 20 20 20 20.0 20.0 20 +#### A masked pattern was here #### +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: default@ext_auth1 +#### A masked pattern was here #### +PREHOOK: Output: default@ext_auth1 +#### A masked pattern was here #### +POSTHOOK: type: 
ALTERTABLE_PROPERTIES +POSTHOOK: Input: default@ext_auth1 +#### A masked pattern was here #### +POSTHOOK: Output: default@ext_auth1