diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuth.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuth.java
new file mode 100644
index 0000000..31af5d2
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuth.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+public abstract class JdbcWithMiniKdcSQLAuth {
+
+  private static MiniHS2 miniHS2 = null;
+  private static MiniHiveKdc miniHiveKdc = null;
+  private Connection hs2Conn;
+  protected static HiveConf hiveConf = new HiveConf();
+
+  public static void beforeTestBase() throws Exception {
+    System.err.println("Testing using HS2 mode: "
+        + hiveConf.getVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE));
+
+    Class.forName(MiniHS2.getJdbcDriverName());
+    hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
+        SQLStdHiveAuthorizerFactory.class.getName());
+    hiveConf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER,
+        SessionStateUserAuthenticator.class.getName());
+    hiveConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+
+    miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2.start(new HashMap<String, String>());
+  }
+
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (hs2Conn != null) {
+      try {
+        hs2Conn.close();
+      } catch (Exception e) {
+        // Ignore shutdown errors since there are negative tests
+      }
+    }
+  }
+
+  @AfterClass
+  public static void afterTest() throws Exception {
+    miniHS2.stop();
+  }
+
+  @Test
+  public void testAuthorization1() throws Exception {
+
+    String tableName1 = "test_jdbc_sql_auth1";
+    String tableName2 = "test_jdbc_sql_auth2";
+    // using different code blocks so that jdbc variables are not accidentally
+    // re-used between the actions. A different connection/statement object
+    // should be used for each action.
+    {
+      // create tables as user1
+      Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1);
+
+      Statement stmt = hs2Conn.createStatement();
+
+      // create tables
+      stmt.execute("create table " + tableName1 + "(i int) ");
+      stmt.execute("create table " + tableName2 + "(i int) ");
+      stmt.execute("grant select on table " + tableName2 + " to user "
+          + MiniHiveKdc.HIVE_TEST_USER_2);
+      stmt.close();
+      hs2Conn.close();
+    }
+
+    {
+      // try dropping table as user1 - should succeed
+      Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1);
+      Statement stmt = hs2Conn.createStatement();
+      stmt.execute("drop table " + tableName1);
+    }
+
+    {
+      // try dropping table as user2 - should fail
+      Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_2);
+      try {
+        Statement stmt = hs2Conn.createStatement();
+        stmt.execute("drop table " + tableName2);
+        fail("Exception due to authorization failure is expected");
+      } catch (SQLException e) {
+        String msg = e.getMessage();
+        System.err.println("Got SQLException with message " + msg);
+        // check parts of the error, not the whole string, so as not to tightly
+        // couple the error message with the test
+        assertTrue("Checking permission denied error", msg.contains("user2"));
+        assertTrue("Checking permission denied error", msg.contains(tableName2));
+        assertTrue("Checking permission denied error", msg.contains("OBJECT OWNERSHIP"));
+      }
+    }
+
+    {
+      // try reading table2 as user2 - should succeed
+      Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_2);
+      Statement stmt = hs2Conn.createStatement();
+      stmt.execute("desc " + tableName2);
+    }
+
+  }
+
+  private Connection getConnection(String userName) throws Exception {
+    miniHiveKdc.loginUser(userName);
+    return DriverManager.getConnection(miniHS2.getJdbcURL());
+  }
+
+}
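Note: the negative test above deliberately matches fragments of the error message rather than the whole string. If more negative cases were added, that pattern could be factored into a helper along these lines (a sketch meant to live inside JdbcWithMiniKdcSQLAuth; the name assertAuthFailure is illustrative and not part of this patch):

  // Hypothetical helper following the fragment-matching pattern of
  // testAuthorization1: run a statement expected to fail authorization
  // and check that the SQLException mentions each given fragment.
  private void assertAuthFailure(String userName, String sql, String... fragments)
      throws Exception {
    Connection conn = getConnection(userName);
    try {
      Statement stmt = conn.createStatement();
      stmt.execute(sql);
      fail("Expected authorization failure for: " + sql);
    } catch (SQLException e) {
      // match parts of the error, not the whole string, to avoid coupling
      // the test to the exact error message
      for (String fragment : fragments) {
        assertTrue("Error should mention " + fragment,
            e.getMessage().contains(fragment));
      }
    } finally {
      conn.close();
    }
  }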
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
index f7ec93d..9bf5e1f 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
@@ -28,26 +28,35 @@
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.GroupMappingServiceProvider;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
 
 import com.google.common.io.Files;
 
+/**
+ * Wrapper around Hadoop's MiniKdc for use in Hive tests.
+ * Provides functions to manage users and their keytabs, including a Hive
+ * service principal and a superuser principal for testing proxy user privileges.
+ * Initializes a set of default users.
+ * See hive-minikdc/src/test/resources/core-site.xml for the users granted
+ * proxy user privileges.
+ */
 public class MiniHiveKdc {
   public static String HIVE_SERVICE_PRINCIPAL = "hive";
   public static String HIVE_TEST_USER_1 = "user1";
   public static String HIVE_TEST_USER_2 = "user2";
   public static String HIVE_TEST_SUPER_USER = "superuser";
 
-  private MiniKdc miniKdc;
-  private File workDir;
-  private Configuration conf;
-  private Map<String, String> userPrincipals =
+  private final MiniKdc miniKdc;
+  private final File workDir;
+  private final Configuration conf;
+  private final Map<String, String> userPrincipals =
     new HashMap<String, String>();
-  private Properties kdcConf = MiniKdc.createConf();
+  private final Properties kdcConf = MiniKdc.createConf();
   private int keyTabCounter = 1;
 
   // hadoop group mapping that maps user to same group
@@ -112,6 +121,12 @@ public void addUserPrincipal(String principal) throws Exception {
     userPrincipals.put(principal, keytab.getPath());
   }
 
+  /**
+   * Login the given principal, using the corresponding keytab file from the internal map.
+   * @param principal principal to login as
+   * @return the UserGroupInformation for the logged-in user
+   * @throws Exception
+   */
   public UserGroupInformation loginUser(String principal) throws Exception {
     ShimLoader.getHadoopShims().loginUserFromKeytab(principal,
@@ -147,5 +162,22 @@ public String getDefaultUserPrincipal() {
     return HIVE_TEST_USER_1;
   }
 
+  /**
+   * Create a MiniHS2 with the hive service principal and keytab in MiniHiveKdc.
+   * @param miniHiveKdc the MiniHiveKdc providing the service principal and keytab
+   * @param hiveConf configuration for the new MiniHS2 instance
+   * @return new MiniHS2 instance
+   * @throws Exception
+   */
+  public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf) throws Exception {
+    String hivePrincipal =
+        miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+    String hiveKeytab = miniHiveKdc.getKeyTabFile(
+        miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
+
+    return new MiniHS2.Builder().withConf(hiveConf).
+        withMiniKdc(hivePrincipal, hiveKeytab).build();
+  }
+
 }
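For reference, the new getMiniHS2WithKerb helper is intended to be used together with loginUser, roughly as follows. This is a self-contained sketch based on JdbcWithMiniKdcSQLAuth above; the class name is illustrative:

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.util.HashMap;

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hive.jdbc.miniHS2.MiniHS2;

  public class KerberizedHS2Example {
    public static void main(String[] args) throws Exception {
      // stand up a KDC and a Kerberized MiniHS2 using the new helper
      HiveConf conf = new HiveConf();
      MiniHiveKdc kdc = MiniHiveKdc.getMiniHiveKdc(conf);
      MiniHS2 hs2 = MiniHiveKdc.getMiniHS2WithKerb(kdc, conf);
      hs2.start(new HashMap<String, String>());

      // log a default test user in from its keytab, then connect over JDBC
      kdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
      Class.forName(MiniHS2.getJdbcDriverName());
      Connection conn = DriverManager.getConnection(hs2.getJdbcURL());
      conn.close();
      hs2.stop();
    }
  }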
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
index 62bfa1e..2355519 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -18,7 +18,10 @@
 
 package org.apache.hive.minikdc;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -67,15 +70,10 @@ public static void beforeTest() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
         SessionHookTest.class.getName());
+    HiveConf hiveConf = new HiveConf();
 
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    String hivePrincipal =
-        miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
-    String hiveKeytab = miniHiveKdc.getKeyTabFile(
-        miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
-
-    miniHS2 = new MiniHS2.Builder().withConf(new HiveConf()).
-        withMiniKdc(hivePrincipal, hiveKeytab).build();
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
     miniHS2.start(confOverlay);
   }
 
@@ -107,8 +105,7 @@ public static void afterTest() throws Exception {
   public void testConnection() throws Exception {
     miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
     hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
-    verifyProperty(SESSION_USER_NAME, miniHiveKdc.
-        getFullyQualifiedUserPrincipal(MiniHiveKdc.HIVE_TEST_USER_1));
+    verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_1);
   }
 
   /***
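The updated testConnection assertion expects the short name ("user1") rather than the fully qualified principal; this matches the HadoopThriftAuthBridge20S change at the end of this patch. A sketch of that mapping, assuming EXAMPLE.COM is the configured default Kerberos realm with default auth_to_local rules (the principal string is illustrative):

  import org.apache.hadoop.security.UserGroupInformation;

  public class ShortNameExample {
    public static void main(String[] args) {
      // createRemoteUser applies the Kerberos auth_to_local rules, so a
      // fully qualified principal collapses to its short name
      UserGroupInformation ugi =
          UserGroupInformation.createRemoteUser("user1@EXAMPLE.COM");
      System.out.println(ugi.getShortUserName()); // expected: user1
    }
  }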
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java
new file mode 100644
index 0000000..842f20a
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthBinary.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.BeforeClass;
+
+public class TestJdbcWithMiniKdcSQLAuthBinary extends JdbcWithMiniKdcSQLAuth {
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_BINARY_MODE);
+    JdbcWithMiniKdcSQLAuth.beforeTestBase();
+  }
+
+}
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java
new file mode 100644
index 0000000..4fe6db6
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcSQLAuthHttp.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.BeforeClass;
+
+public class TestJdbcWithMiniKdcSQLAuthHttp extends JdbcWithMiniKdcSQLAuth {
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_HTTP_MODE);
+    JdbcWithMiniKdcSQLAuth.beforeTestBase();
+  }
+
+}
diff --git a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
index d08bfde..e8d405d 100644
--- a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
+++ b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -20,12 +20,8 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Properties;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -39,7 +35,6 @@
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.service.Service;
 import org.apache.hive.service.cli.CLIServiceClient;
 import org.apache.hive.service.cli.SessionHandle;
@@ -51,19 +46,19 @@
 import com.google.common.io.Files;
 
 public class MiniHS2 extends AbstractHiveService {
+  public static final String HS2_BINARY_MODE = "binary";
+  public static final String HS2_HTTP_MODE = "http";
   private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
   private HiveServer2 hiveServer2 = null;
   private final File baseDir;
   private final Path baseDfsDir;
   private static final AtomicLong hs2Counter = new AtomicLong();
-  private static final String HS2_BINARY_MODE = "binary";
-  private static final String HS2_HTTP_MODE = "http";
   private MiniMrShim mr;
   private MiniDFSShim dfs;
   private boolean useMiniMR = false;
   private boolean useMiniKdc = false;
-  private String serverPrincipal;
-  private String serverKeytab;
+  private final String serverPrincipal;
+  private final String serverKeytab;
 
   public static class Builder {
     private HiveConf hiveConf = new HiveConf();
@@ -71,6 +66,7 @@
     private boolean useMiniKdc = false;
     private String serverPrincipal;
     private String serverKeytab;
+    private boolean isHTTPTransMode = false;
 
     public Builder() {
     }
@@ -92,10 +88,25 @@ public Builder withConf(HiveConf hiveConf) {
       return this;
     }
 
+    /**
+     * Start HS2 with HTTP transport mode; the default is binary mode.
+     * @return this Builder
+     */
+    public Builder withHTTPTransport() {
+      this.isHTTPTransMode = true;
+      return this;
+    }
+
     public MiniHS2 build() throws Exception {
       if (useMiniMR && useMiniKdc) {
         throw new IOException("Can't create secure miniMr ... yet");
       }
+      if (isHTTPTransMode) {
+        hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_HTTP_MODE);
+      } else {
+        hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
+      }
       return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, serverKeytab);
     }
   }
@@ -164,7 +175,6 @@ private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc, String
     hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
     // reassign a new port, just in case if one of the MR services grabbed the last one
     setBinaryPort(MetaStoreUtils.findFreePort());
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort());
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
@@ -253,16 +263,35 @@ public String getJdbcURL(String dbName) {
 
   /**
    * return connection URL for this server instance
    * @param dbName - DB name to be included in the URL
-   * @param urlExtension - Addional string to be appended to URL
+   * @param sessionConfExt - Additional string to be appended to the sessionConf part of the URL
+   * @return
+   */
+  public String getJdbcURL(String dbName, String sessionConfExt) {
+    return getJdbcURL(dbName, sessionConfExt, "");
+  }
+
+  /**
+   * return connection URL for this server instance
+   * @param dbName - DB name to be included in the URL
+   * @param sessionConfExt - Additional string to be appended to the sessionConf part of the URL
+   * @param hiveConfExt - Additional string to be appended to the HiveConf part of the URL
+   *          (excluding the ?)
    * @return
    */
-  public String getJdbcURL(String dbName, String urlExtension) {
-    assert urlExtension != null;
+  public String getJdbcURL(String dbName, String sessionConfExt, String hiveConfExt) {
+    sessionConfExt = (sessionConfExt == null ? "" : sessionConfExt);
+    hiveConfExt = (hiveConfExt == null ? "" : hiveConfExt);
     String krbConfig = "";
     if (isUseMiniKdc()) {
       krbConfig = ";principal=" + serverPrincipal;
     }
-    return getBaseJdbcURL() + dbName + krbConfig + urlExtension;
+    if (isHttpTransportMode()) {
+      hiveConfExt = "hive.server2.transport.mode=http;hive.server2.thrift.http.path=cliservice;"
+          + hiveConfExt;
+    }
+    if (!hiveConfExt.trim().equals("")) {
+      hiveConfExt = "?" + hiveConfExt;
+    }
+    return getBaseJdbcURL() + dbName + krbConfig + sessionConfExt + hiveConfExt;
   }
 
   /**
@@ -270,8 +299,7 @@ public String getJdbcURL(String dbName, String urlExtension) {
    * @return
    */
   public String getBaseJdbcURL() {
-    String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
-    if(transportMode != null && (transportMode.equalsIgnoreCase(HS2_HTTP_MODE))) {
+    if (isHttpTransportMode()) {
       return "jdbc:hive2://" + getHost() + ":" + getHttpPort() + "/";
     }
     else {
@@ -279,6 +307,11 @@
     }
   }
 
+  private boolean isHttpTransportMode() {
+    String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+    return transportMode != null && transportMode.equalsIgnoreCase(HS2_HTTP_MODE);
+  }
+
   public static String getJdbcDriverName() {
     return driverName;
   }
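For reference, a sketch of what the reworked URL construction produces. The Builder flag and the three-argument getJdbcURL are from the patch above; the conf values and the printed URL shape are illustrative:

  import java.util.HashMap;

  import org.apache.hive.jdbc.miniHS2.MiniHS2;

  public class HttpUrlExample {
    public static void main(String[] args) throws Exception {
      // build an HTTP-mode MiniHS2 via the new Builder flag
      MiniHS2 hs2 = new MiniHS2.Builder().withHTTPTransport().build();
      hs2.start(new HashMap<String, String>());

      // session conf is appended directly; hive conf goes after '?', and in
      // HTTP mode the transport settings are prepended to the hive conf part
      String url = hs2.getJdbcURL("default", ";ssl=false", "hive.exec.parallel=true");
      // expected shape (host/port depend on the instance):
      // jdbc:hive2://localhost:10001/default;ssl=false?hive.server2.transport.mode=http;
      //     hive.server2.thrift.http.path=cliservice;hive.exec.parallel=true
      System.out.println(url);
      hs2.stop();
    }
  }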
diff --git a/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java b/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index 9e296de..8b9da7a 100644
--- a/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ b/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -609,6 +609,7 @@ public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
           clientUgi = UserGroupInformation.createProxyUser(
               endUser, UserGroupInformation.getLoginUser());
           remoteUser.set(clientUgi.getShortUserName());
+          LOG.debug("Set remoteUser: " + remoteUser.get());
           return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
             @Override
             public Boolean run() {
@@ -620,7 +621,10 @@ public Boolean run() {
             }
           });
         } else {
-          remoteUser.set(endUser);
+          // use the short user name for the request
+          UserGroupInformation endUserUgi = UserGroupInformation.createRemoteUser(endUser);
+          remoteUser.set(endUserUgi.getShortUserName());
+          LOG.debug("Set remoteUser: " + remoteUser.get() + ", from endUser: " + endUser);
           return wrapped.process(inProt, outProt);
         }
       } catch (RuntimeException rte) {
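For completeness, a sketch of the proxy-user branch that the changed else branch sits beside: the service's login user impersonates the end user, and the request runs inside doAs. The user name and class name are illustrative, not part of the patch:

  import java.security.PrivilegedExceptionAction;

  import org.apache.hadoop.security.UserGroupInformation;

  public class ProxyUserExample {
    public static void main(String[] args) throws Exception {
      // the Thrift processor's proxy path: impersonate the end user on top
      // of the service's login user, then run the request in doAs
      UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
          "user1", UserGroupInformation.getLoginUser());
      proxyUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() {
          // the wrapped TProcessor call would happen here, as "user1"
          return null;
        }
      });
    }
  }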