diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java new file mode 100644 index 0000000..f42390c --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java @@ -0,0 +1,123 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hive.jdbc.authorization; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.verify; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Matchers; +import org.mockito.Mockito; +/** + * Test context information that gets passed to authorization api + */ +public class TestHS2AuthzContext { + private static MiniHS2 miniHS2 = null; + static HiveAuthorizer mockedAuthorizer; + + /** + * This factory creates a mocked HiveAuthorizer class. + * Use the mocked class to capture the argument passed to it in the test case. 
+ */ + static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory { + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator) { + TestHS2AuthzContext.mockedAuthorizer = Mockito.mock(HiveAuthorizer.class); + return TestHS2AuthzContext.mockedAuthorizer; + } + } + + @BeforeClass + public static void beforeTest() throws Exception { + Class.forName(MiniHS2.getJdbcDriverName()); + HiveConf conf = new HiveConf(); + conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); + conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); + conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); + conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + + miniHS2 = new MiniHS2(conf); + miniHS2.start(new HashMap()); + } + + @AfterClass + public static void afterTest() throws Exception { + if (miniHS2.isStarted()) { + miniHS2.stop(); + } + } + + @Test + public void testAuthzContextContents() throws Exception { + + Connection hs2Conn = getConnection("user1"); + Statement stmt = hs2Conn.createStatement(); + + final String cmd = "show tables"; + stmt.execute(cmd); + stmt.close(); + hs2Conn.close(); + + ArgumentCaptor contextCapturer = ArgumentCaptor + .forClass(HiveAuthzContext.class); + + verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class), + Matchers.anyListOf(HivePrivilegeObject.class), + Matchers.anyListOf(HivePrivilegeObject.class), contextCapturer.capture()); + + HiveAuthzContext context = contextCapturer.getValue(); + + assertEquals("Command ", cmd, context.getCommandString()); + assertTrue("ip address pattern check", context.getIpAddress().contains(".")); + // ip address size check - check for something better than non zero + assertTrue("ip address size check", context.getIpAddress().length() > 
7); + // session string is supposed to be unique, so its got to be of some reasonable size + assertTrue("session string size check", context.getSessionString().length() > 10); + assertEquals("Client type ", HiveAuthzContext.CLIENT_TYPE.HIVESERVER2, context.getClientType()); + } + + private Connection getConnection(String userName) throws SQLException { + return DriverManager.getConnection(miniHS2.getJdbcURL(), userName, "bar"); + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index ac76214..c89f90c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -101,6 +101,8 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext.CLIENT_TYPE; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType; @@ -445,7 +447,7 @@ public int compile(String command, boolean resetTaskIds) { try { perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION); - doAuthorization(sem); + doAuthorization(sem, command); } catch (AuthorizationException authExp) { console.printError("Authorization failed:" + authExp.getMessage() + ". 
Use SHOW GRANT to get more details."); @@ -483,15 +485,25 @@ public int compile(String command, boolean resetTaskIds) { } } - public static void doAuthorization(BaseSemanticAnalyzer sem) + /** + * Do authorization using post semantic analysis information in the semantic analyzer + * The original command is also passed so that authorization interface can provide + * more useful information in logs. + * @param sem + * @param command + * @throws HiveException + * @throws AuthorizationException + */ + public static void doAuthorization(BaseSemanticAnalyzer sem, String command) throws HiveException, AuthorizationException { HashSet inputs = sem.getInputs(); HashSet outputs = sem.getOutputs(); SessionState ss = SessionState.get(); HiveOperation op = ss.getHiveOperation(); Hive db = sem.getDb(); + if (ss.isAuthorizationModeV2()) { - doAuthorizationV2(ss, op, inputs, outputs); + doAuthorizationV2(ss, op, inputs, outputs, command); return; } if (op == null) { @@ -672,11 +684,20 @@ public static void doAuthorization(BaseSemanticAnalyzer sem) } private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet inputs, - HashSet outputs) throws HiveException { + HashSet outputs, String command) throws HiveException { + + HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder(); + + authzContextBuilder.setClientType(ss.isHiveServerQuery() ? 
CLIENT_TYPE.HIVESERVER2 + : CLIENT_TYPE.HIVECLI); + authzContextBuilder.setUserIpAddress(ss.getUserIpAddress()); + authzContextBuilder.setSessionString(ss.getSessionId()); + authzContextBuilder.setCommandString(command); + HiveOperationType hiveOpType = getHiveOperationType(op); List inputsHObjs = getHivePrivObjects(inputs); List outputHObjs = getHivePrivObjects(outputs); - ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs); + ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build()); return; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 2c5b463..5e11a5c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -32,8 +32,8 @@ import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.LinkedList; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -50,9 +50,9 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.ExplainWork; -import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory; import org.apache.hadoop.hive.ql.session.SessionState; @@ -69,7 +69,7 @@ public class ExplainTask extends Task implements Serializable { private static final long serialVersionUID = 1L; public static final String EXPL_COLUMN_NAME = "Explain"; - private Set> visitedOps = new HashSet>(); + private final Set> visitedOps = new HashSet>(); private boolean isLogical = false; 
public ExplainTask() { @@ -167,7 +167,7 @@ public JSONObject getJSONPlan(PrintStream out, ExplainWork work) public JSONObject getJSONPlan(PrintStream out, String ast, List> tasks, Task fetchTask, boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception { - + // If the user asked for a formatted output, dump the json output // in the output stream JSONObject outJSONObject = new JSONObject(); @@ -335,11 +335,9 @@ private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) } final List exceptions = new ArrayList(); - Object delegate = SessionState.get().getActiveAuthorizer(); if (delegate != null) { Class itface = SessionState.get().getAuthorizerInterface(); - Object authorizer = AuthorizationFactory.create(delegate, itface, new AuthorizationFactory.AuthorizationExceptionHandler() { public void exception(Exception exception) { @@ -349,7 +347,7 @@ public void exception(Exception exception) { SessionState.get().setActiveAuthorizer(authorizer); try { - Driver.doAuthorization(analyzer); + Driver.doAuthorization(analyzer, ""); } finally { SessionState.get().setActiveAuthorizer(delegate); } @@ -399,7 +397,7 @@ private JSONObject outputMap(Map mp, boolean hasHeader, PrintStream out, } } else if (ent.getValue() instanceof List) { - if (ent.getValue() != null && !((List)ent.getValue()).isEmpty() + if (ent.getValue() != null && !((List)ent.getValue()).isEmpty() && ((List)ent.getValue()).get(0) != null && ((List)ent.getValue()).get(0) instanceof TezWork.Dependency) { if (out != null) { @@ -908,6 +906,7 @@ public JSONObject outputStagePlans(PrintStream out, List tasks, * */ public class MethodComparator implements Comparator { + @Override public int compare(Method m1, Method m2) { return m1.getName().compareTo(m2.getName()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java index 7ffbc44..c2282df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java @@ -37,10 +37,11 @@ * @param hiveOpType * @param inputHObjs * @param outputHObjs + * @param context * @throws HiveAuthzPluginException * @throws HiveAccessControlException */ void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, - List outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException; + List outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java index dbef61a..911a943 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java @@ -146,11 +146,12 @@ void revokeRole(List hivePrincipals, List roles, boolean * @param hiveOpType * @param inputsHObjs * @param outputHObjs + * @param context * @throws HiveAuthzPluginException * @throws HiveAccessControlException */ void checkPrivileges(HiveOperationType hiveOpType, List inputsHObjs, - List outputHObjs) + List outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java index 558d4ff..69b803a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java @@ -80,8 +80,9 @@ public void revokeRole(List hivePrincipals, List roles, @Override public void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, - List outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException { - authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs); + List outputHObjs, HiveAuthzContext context) + throws HiveAuthzPluginException, HiveAccessControlException { + authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java new file mode 100644 index 0000000..bb71a7e --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; + +/** + * Provides context information in authorization check call that can be used for + * auditing and/or authorization. + * It is an immutable class. Builder inner class is used to instantiate it. + */ +@LimitedPrivate(value = { "" }) +@Evolving +public final class HiveAuthzContext { + + public enum CLIENT_TYPE { + HIVESERVER2, HIVECLI + }; + + public static class Builder { + private String userIpAddress; + private String sessionString; + private CLIENT_TYPE clientType; + private String commandString; + + /** + * Get user's ip address. This is set only if the authorization + * api is invoked from a HiveServer2 instance in standalone mode. + * @return ip address + */ + public String getUserIpAddress() { + return userIpAddress; + } + public void setUserIpAddress(String userIpAddress) { + this.userIpAddress = userIpAddress; + } + public String getSessionString() { + return sessionString; + } + public void setSessionString(String sessionString) { + this.sessionString = sessionString; + } + public CLIENT_TYPE getClientType() { + return clientType; + } + public void setClientType(CLIENT_TYPE clientType) { + this.clientType = clientType; + } + public String getCommandString() { + return commandString; + } + public void setCommandString(String commandString) { + this.commandString = commandString; + } + public HiveAuthzContext build(){ + return new HiveAuthzContext(this); + } + + + } + + private final String userIpAddress; + private final String sessionString; + private final CLIENT_TYPE clientType; + private final String commandString; + + private HiveAuthzContext(Builder builder) { + this.userIpAddress = builder.userIpAddress; + this.sessionString = builder.sessionString; + this.clientType = builder.clientType; + this.commandString 
= builder.commandString; + + } + + public String getIpAddress() { + return userIpAddress; + } + + public String getSessionString() { + return sessionString; + } + + public CLIENT_TYPE getClientType() { + return clientType; + } + + public String getCommandString() { + return commandString; + } + + @Override + public String toString() { + return "HiveAuthzContext [userIpAddress=" + userIpAddress + ", sessionString=" + sessionString + + ", clientType=" + clientType + ", commandString=" + commandString + "]"; + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index 8937cfa..07af475 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; @@ -57,13 +58,13 @@ public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClie @Override public void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, - List outputHObjs) throws HiveAuthzPluginException, - HiveAccessControlException { + List outputHObjs, HiveAuthzContext context) + throws HiveAuthzPluginException, 
HiveAccessControlException { if (LOG.isDebugEnabled()) { String msg = "Checking privileges for operation " + hiveOpType + " by user " + authenticator.getUserName() + " on " + " input objects " + inputHObjs - + " and output objects " + outputHObjs; + + " and output objects " + outputHObjs + ". Context Info: " + context; LOG.debug(msg); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index dbc04c7..fcfcf42 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.session; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; -import com.google.common.base.Preconditions; - import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -69,6 +67,8 @@ import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.util.ReflectionUtils; +import com.google.common.base.Preconditions; + /** * SessionState encapsulates common data associated with a session. * @@ -83,7 +83,7 @@ private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path"; private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path"; private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space"; - private Map> tempTables = new HashMap>(); + private final Map> tempTables = new HashMap>(); protected ClassLoader parentLoader; @@ -173,6 +173,8 @@ private final String CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER = "hive.internal.ss.authz.settings.applied.marker"; + private String userIpAddress; + /** * Lineage state. 
*/ @@ -466,7 +468,7 @@ private void createPath(Configuration conf, Path p, String perm) throws IOExcept FileSystem fs = p.getFileSystem(conf); p = new Path(fs.makeQualified(p).toString()); FsPermission fsPermission = new FsPermission(Short.parseShort(perm.trim(), 8)); - + if (!Utilities.createDirsWithPermission(conf, p, fsPermission)) { throw new IOException("Cannot create directory: " + p.toString()); @@ -475,7 +477,7 @@ private void createPath(Configuration conf, Path p, String perm) throws IOExcept // best effort to clean up if we don't shut down properly fs.deleteOnExit(p); } - + /** * Setup authentication and authorization plugins for this session. @@ -1135,4 +1137,20 @@ public void applyAuthorizationPolicy() throws HiveException { public Map> getTempTables() { return tempTables; } + + /** + * @return ip address for user running the query + */ + public String getUserIpAddress() { + return userIpAddress; + } + + /** + * set the ip address for user running the query + * @param userIpAddress + */ + public void setUserIpAddress(String userIpAddress) { + this.userIpAddress = userIpAddress; + } + } diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 6a7ee7a..7a3286d 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -98,6 +98,7 @@ public HiveSessionImpl(TProtocolVersion protocol, String username, String passwo hiveConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue()); sessionState = new SessionState(hiveConf, username); + sessionState.setUserIpAddress(ipAddress); sessionState.setIsHiveServerQuery(true); SessionState.start(sessionState);