diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java index f7ec93d..9bf5e1f 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java @@ -28,26 +28,35 @@ import java.util.Map; import java.util.Properties; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.GroupMappingServiceProvider; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hive.jdbc.miniHS2.MiniHS2; import com.google.common.io.Files; +/** + * Wrapper around Hadoop's MiniKdc for use in hive tests. + * Has functions to manage users and their keytabs. This includes a hive service principal and + * a superuser principal for testing proxy user privileges. + * Has a set of default users that it initializes. + * See hive-minikdc/src/test/resources/core-site.xml for users granted proxy user privileges. 
+ */ public class MiniHiveKdc { public static String HIVE_SERVICE_PRINCIPAL = "hive"; public static String HIVE_TEST_USER_1 = "user1"; public static String HIVE_TEST_USER_2 = "user2"; public static String HIVE_TEST_SUPER_USER = "superuser"; - private MiniKdc miniKdc; - private File workDir; - private Configuration conf; - private Map userPrincipals = + private final MiniKdc miniKdc; + private final File workDir; + private final Configuration conf; + private final Map userPrincipals = new HashMap(); - private Properties kdcConf = MiniKdc.createConf(); + private final Properties kdcConf = MiniKdc.createConf(); private int keyTabCounter = 1; // hadoop group mapping that maps user to same group @@ -112,6 +121,12 @@ public void addUserPrincipal(String principal) throws Exception { userPrincipals.put(principal, keytab.getPath()); } + /** + * Login the given principal, using corresponding keytab file from internal map + * @param principal + * @return + * @throws Exception + */ public UserGroupInformation loginUser(String principal) throws Exception { ShimLoader.getHadoopShims().loginUserFromKeytab(principal, @@ -147,5 +162,22 @@ public String getDefaultUserPrincipal() { return HIVE_TEST_USER_1; } + /** + * Create a MiniHS2 with the hive service principal and keytab in MiniHiveKdc + * @param miniHiveKdc + * @param hiveConf + * @return new MiniHS2 instance + * @throws Exception + */ + public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf) throws Exception { + String hivePrincipal = + miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL); + String hiveKeytab = miniHiveKdc.getKeyTabFile( + miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL)); + + return new MiniHS2.Builder().withConf(hiveConf). 
+ withMiniKdc(hivePrincipal, hiveKeytab).build(); + } + } diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java index 62bfa1e..83b00f5 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java @@ -18,7 +18,10 @@ package org.apache.hive.minikdc; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.sql.Connection; import java.sql.DriverManager; @@ -67,15 +70,10 @@ public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, SessionHookTest.class.getName()); + HiveConf hiveConf = new HiveConf(); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); - String hivePrincipal = - miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL); - String hiveKeytab = miniHiveKdc.getKeyTabFile( - miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL)); - - miniHS2 = new MiniHS2.Builder().withConf(new HiveConf()). - withMiniKdc(hivePrincipal, hiveKeytab).build(); + miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf); miniHS2.start(confOverlay); } diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuth.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuth.java new file mode 100644 index 0000000..5a88ccc --- /dev/null +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuth.java @@ -0,0 +1,147 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.minikdc; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestJdbcWithMiniKdcSQLAuth { + + + private static MiniHS2 miniHS2 = null; + private static MiniHiveKdc miniHiveKdc = null; + private Connection hs2Conn; + + @BeforeClass + public static void beforeTest() throws Exception { + Class.forName(MiniHS2.getJdbcDriverName()); + HiveConf conf = new HiveConf(); + conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SQLStdHiveAuthorizerFactory.class.getName()); + conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); + 
conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); + conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + + miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(conf); + miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, conf); + miniHS2.start(new HashMap()); + + } + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + if (hs2Conn != null) { + try { + hs2Conn.close(); + } catch (Exception e) { + // Ignore shutdown errors since there are negative tests + } + } + } + + @AfterClass + public static void afterTest() throws Exception { + miniHS2.stop(); + } + + @Test + public void testAuthorization1() throws Exception { + + String tableName1 = "test_jdbc_sql_auth1"; + String tableName2 = "test_jdbc_sql_auth2"; + // using different code blocks so that jdbc variables are not accidentally re-used + // between the actions. Different connection/statement objects should be used for each action. 
+ { + // create tables as user1 + Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1); + + Statement stmt = hs2Conn.createStatement(); + + // create tables + stmt.execute("create table " + tableName1 + "(i int) "); + stmt.execute("create table " + tableName2 + "(i int) "); + stmt.execute("grant select on table " + tableName2 + " to user " + + MiniHiveKdc.HIVE_TEST_USER_2); + stmt.close(); + hs2Conn.close(); + } + + { + // try dropping table as user1 - should succeed + Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_1)); + Statement stmt = hs2Conn.createStatement(); + stmt.execute("drop table " + tableName1); + } + + { + // try dropping table as user2 - should fail + Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_2)); + try { + Statement stmt = hs2Conn.createStatement(); + stmt.execute("drop table " + tableName2); + fail("Exception due to authorization failure is expected"); + } catch (SQLException e) { + String msg = e.getMessage(); + System.err.println("Got SQLException with message " + msg); + // check parts of the error, not the whole string so as not to tightly + // couple the error message with test + assertTrue("Checking permission denied error", msg.contains("user2")); + assertTrue("Checking permission denied error", msg.contains(tableName2)); + assertTrue("Checking permission denied error", msg.contains("OBJECT OWNERSHIP")); + } + } + + { + // try reading table2 as user2 - should succeed + Connection hs2Conn = getConnection((MiniHiveKdc.HIVE_TEST_USER_2)); + Statement stmt = hs2Conn.createStatement(); + stmt.execute(" desc " + tableName2); + } + + } + + private Connection getConnection(String userName) throws Exception { + miniHiveKdc.loginUser(userName); + return DriverManager.getConnection(miniHS2.getJdbcURL()); + } + + + +} diff --git a/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java 
b/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java index 9e296de..b270d39 100644 --- a/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java +++ b/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java @@ -620,7 +620,9 @@ public Boolean run() { } }); } else { - remoteUser.set(endUser); + // use the short user name for the request + UserGroupInformation endUserUgi = UserGroupInformation.createRemoteUser(endUser); + remoteUser.set(endUserUgi.getShortUserName()); return wrapped.process(inProt, outProt); } } catch (RuntimeException rte) {