diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index c5e36a5..d3de05e 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -501,6 +501,7 @@ boolean initArgs(String[] args) { List commands = new LinkedList(); List files = new LinkedList(); String driver = null, user = null, pass = null, url = null, cmd = null; + String auth = null; for (int i = 0; i < args.length; i++) { if (args[i].equals("--help") || args[i].equals("-h")) { @@ -543,6 +544,9 @@ boolean initArgs(String[] args) { driver = args[i++ + 1]; } else if (args[i].equals("-n")) { user = args[i++ + 1]; + } else if (args[i].equals("-a")) { + auth = args[i++ + 1]; + getOpts().setAuthType(auth); } else if (args[i].equals("-p")) { pass = args[i++ + 1]; } else if (args[i].equals("-u")) { diff --git beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java index c3abba3..afd38a7 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java +++ beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java @@ -86,6 +86,8 @@ private String historyFile = new File(saveDir(), "history").getAbsolutePath(); private String scriptFile = null; + private String authType = null; + private Map hiveVariables = new HashMap(); @@ -463,6 +465,13 @@ public String getNullString(){ return nullEmptyString ? 
"" : DEFAULT_NULL_STRING; } + public String getAuthType() { + return authType; + } + + public void setAuthType(String authType) { + this.authType = authType; + } } diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index d2d7fd3..3dbe444 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -862,7 +862,7 @@ public boolean connect(String line) throws Exception { String user = parts.length < 3 ? null : parts[2]; String pass = parts.length < 4 ? null : parts[3]; String driver = parts.length < 5 ? null : parts[4]; - + Properties props = new Properties(); if (url != null) { props.setProperty("url", url); @@ -876,6 +876,7 @@ public boolean connect(String line) throws Exception { if (pass != null) { props.setProperty("password", pass); } + return connect(props); } @@ -922,6 +923,7 @@ public boolean connect(Properties props) throws IOException { "javax.jdo.option.ConnectionPassword", "ConnectionPassword", }); + String auth = getProperty(props, new String[] {"auth"}); if (url == null || url.length() == 0) { return beeLine.error("Property \"url\" is required"); @@ -937,14 +939,23 @@ public boolean connect(Properties props) throws IOException { if (username == null) { username = beeLine.getConsoleReader().readLine("Enter username for " + url + ": "); } + props.setProperty("user", username); if (password == null) { password = beeLine.getConsoleReader().readLine("Enter password for " + url + ": ", new Character('*')); } + props.setProperty("password", password); + + if (auth == null) { + auth = beeLine.getOpts().getAuthType(); + } + if (auth != null) { + props.setProperty("auth", auth); + } try { beeLine.getDatabaseConnections().setConnection( - new DatabaseConnection(beeLine, driver, url, username, password)); + new DatabaseConnection(beeLine, driver, url, props)); beeLine.getDatabaseConnection().getConnection(); 
beeLine.setCompletions(); diff --git beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java index 1de5829..3344208 100644 --- beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java +++ beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java @@ -31,6 +31,7 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Properties; import java.util.Map; import java.util.Set; import java.util.TreeSet; @@ -44,18 +45,16 @@ private DatabaseMetaData meta; private final String driver; private final String url; - private final String username; - private final String password; + private final Properties props; private Schema schema = null; private Completor sqlCompletor = null; public DatabaseConnection(BeeLine beeLine, String driver, String url, - String username, String password) throws SQLException { + Properties props) throws SQLException { this.beeLine = beeLine; this.driver = driver; - this.username = username; - this.password = password; + this.props = props; this.url = appendHiveVariables(beeLine, url); } @@ -157,7 +156,7 @@ boolean connect() throws SQLException { return beeLine.error(e); } - setConnection(DriverManager.getConnection(getUrl(), username, password)); + setConnection(DriverManager.getConnection(getUrl(), props)); setDatabaseMetaData(getConnection().getMetaData()); try { @@ -323,4 +322,4 @@ public Column(String name) { } } } -} \ No newline at end of file +} diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 36503fa..713342b 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -784,6 +784,7 @@ // HiveServer2 auth configuration HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE", new StringsValidator("NOSASL", "NONE", "LDAP", "KERBEROS", "CUSTOM")), + 
HIVE_SERVER2_ALLOW_USER_SUBSTITUTION("hive.server2.allow.user.substitution", true), HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""), HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""), HIVE_SERVER2_PLAIN_LDAP_URL("hive.server2.authentication.ldap.url", null), diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 7b1c9da..6ce7c1c 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -256,7 +256,7 @@ private void checkBadUrl(String url) throws SQLException { try{ DriverManager.getConnection(url, "", ""); fail("should have thrown IllegalArgumentException but did not "); - }catch(IllegalArgumentException i){ + }catch(SQLException i){ assertTrue(i.getMessage().contains("Bad URL format. Hostname not found " + " in authority part of the url")); } diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index ef39573..d64ac32 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -18,6 +18,7 @@ package org.apache.hive.jdbc; +import java.io.IOException; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; @@ -47,16 +48,23 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.auth.SaslQOP; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.TCLIService; +import 
org.apache.hive.service.cli.thrift.TCancelDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TCancelDelegationTokenResp; import org.apache.hive.service.cli.thrift.TCloseSessionReq; +import org.apache.hive.service.cli.thrift.TGetDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TGetDelegationTokenResp; import org.apache.hive.service.cli.thrift.TOpenSessionReq; import org.apache.hive.service.cli.thrift.TOpenSessionResp; import org.apache.hive.service.cli.thrift.TProtocolVersion; +import org.apache.hive.service.cli.thrift.TRenewDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp; import org.apache.hive.service.cli.thrift.TSessionHandle; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.thrift.TException; @@ -74,6 +82,7 @@ private static final String HIVE_AUTH_TYPE= "auth"; private static final String HIVE_AUTH_QOP = "sasl.qop"; private static final String HIVE_AUTH_SIMPLE = "noSasl"; + private static final String HIVE_AUTH_TOKEN = "delegationToken"; private static final String HIVE_AUTH_USER = "user"; private static final String HIVE_AUTH_PRINCIPAL = "principal"; private static final String HIVE_AUTH_PASSWD = "password"; @@ -102,7 +111,12 @@ public HiveConnection(String uri, Properties info) throws SQLException { loginTimeout = DriverManager.getLoginTimeout(); jdbcURI = uri; // parse the connection uri - Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI); + Utils.JdbcConnectionParams connParams; + try { + connParams = Utils.parseURL(uri); + } catch (IllegalArgumentException e) { + throw new SQLException(e); + } // extract parsed connection parameters: // JDBC URL: jdbc:hive2://:/dbName;sess_var_list?hive_conf_list#hive_var_list // each list: =;= and so on @@ -119,13 +133,17 @@ public HiveConnection(String uri, Properties info) throws SQLException { if (isEmbeddedMode) { client = new EmbeddedThriftBinaryCLIService(); } else { - // extract user/password from JDBC connection 
properties if its not supplied in the connection URL + // extract user/password from JDBC connection properties if its not supplied in the + // connection URL if (info.containsKey(HIVE_AUTH_USER)) { sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER)); if (info.containsKey(HIVE_AUTH_PASSWD)) { sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD)); } } + if (info.containsKey(HIVE_AUTH_TYPE)) { + sessConfMap.put(HIVE_AUTH_TYPE, info.getProperty(HIVE_AUTH_TYPE)); + } // open the client transport openTransport(); } @@ -137,7 +155,7 @@ public HiveConnection(String uri, Properties info) throws SQLException { supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4); // open client session - openSession(); + openSession(connParams.getSessionVars()); configureConnection(); } @@ -184,14 +202,26 @@ private TTransport createHttpTransport() throws SQLException { return transport; } + /** + * Create transport per the connection options + * Supported transport options are: + * - SASL based transports over + * + Kerberos + * + Delegation token + * + SSL + * + non-SSL + * - Raw (non-SASL) socket + * + * Kerberos and Delegation token supports SASL QOP configurations + */ private TTransport createBinaryTransport() throws SQLException { try { // handle secure connection if specified if (!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))) { // If Kerberos + Map saslProps = new HashMap(); + SaslQOP saslQOP = SaslQOP.AUTH; if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) { - Map saslProps = new HashMap(); - SaslQOP saslQOP = SaslQOP.AUTH; if (sessConfMap.containsKey(HIVE_AUTH_QOP)) { try { saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP)); @@ -206,30 +236,43 @@ private TTransport createBinaryTransport() throws SQLException { sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps); } else { - String userName = sessConfMap.get(HIVE_AUTH_USER); - if ((userName == null) 
|| userName.isEmpty()) { - userName = HIVE_ANONYMOUS_USER; - } - String passwd = sessConfMap.get(HIVE_AUTH_PASSWD); - if ((passwd == null) || passwd.isEmpty()) { - passwd = HIVE_ANONYMOUS_PASSWD; - } - String useSslStr = sessConfMap.get(HIVE_USE_SSL); - if ("true".equalsIgnoreCase(useSslStr)) { - String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE); - String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD); - if (sslTrustStore == null || sslTrustStore.isEmpty()) { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout); - } else { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout, + // If there's a delegation token available then use token based connection + String tokenStr = getClientDelegationToken(sessConfMap); + if (tokenStr != null) { + transport = KerberosSaslHelper.getTokenTransport(tokenStr, + host, HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps); + } else { + // we are using PLAIN Sasl connection with user/password + String userName = sessConfMap.get(HIVE_AUTH_USER); + if ((userName == null) || userName.isEmpty()) { + userName = HIVE_ANONYMOUS_USER; + } + String passwd = sessConfMap.get(HIVE_AUTH_PASSWD); + if ((passwd == null) || passwd.isEmpty()) { + passwd = HIVE_ANONYMOUS_PASSWD; + } + String useSslStr = sessConfMap.get(HIVE_USE_SSL); + if ("true".equalsIgnoreCase(useSslStr)) { + // get SSL socket + String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE); + String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD); + if (sslTrustStore == null || sslTrustStore.isEmpty()) { + transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout); + } else { + transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout, sslTrustStore, sslTrustStorePassword); + } + transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport); + } else { + // get non-SSL socket transport + transport = HiveAuthFactory.getSocketTransport(host, port, 
loginTimeout); } - } else { - transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout); - } + // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL) transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport); + } } } else { + // Raw socket connection (non-sasl) transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout); } } catch (SaslException e) { @@ -253,11 +296,32 @@ private boolean isHttpTransportMode() { return false; } - private void openSession() throws SQLException { + // Lookup the delegation token. First in the connection URL, then Configuration + private String getClientDelegationToken(Map jdbcConnConf) + throws SQLException { + String tokenStr = null; + if (HIVE_AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(HIVE_AUTH_TYPE))) { + // check delegation token in job conf if any + try { + tokenStr = ShimLoader.getHadoopShims(). + getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN); + } catch (IOException e) { + throw new SQLException("Error reading token ", e); + } + } + return tokenStr; + } + + private void openSession(Map sessVars) throws SQLException { TOpenSessionReq openReq = new TOpenSessionReq(); // set the session configuration - // openReq.setConfiguration(null); + if (sessVars.containsKey(HiveAuthFactory.HS2_PROXY_USER)) { + Map openConf = new HashMap(); + openConf.put(HiveAuthFactory.HS2_PROXY_USER, + sessVars.get(HiveAuthFactory.HS2_PROXY_USER)); + openReq.setConfiguration(openConf); + } try { TOpenSessionResp openResp = client.OpenSession(openReq); @@ -330,6 +394,44 @@ public void abort(Executor executor) throws SQLException { throw new SQLException("Method not supported"); } + public String getDelegationToken(String owner, String renewer) throws SQLException { + TGetDelegationTokenReq req = new TGetDelegationTokenReq(sessHandle, owner, renewer); + try { + TGetDelegationTokenResp tokenResp = client.GetDelegationToken(req); + Utils.verifySuccess(tokenResp.getStatus()); + 
return tokenResp.getDelegationToken(); + } catch (TException e) { + throw new SQLException("Could not retrieve token: " + + e.getMessage(), "08S01", e); + } + } + + public void cancelDelegationToken(String tokenStr) throws SQLException { + TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq(sessHandle, tokenStr); + try { + TCancelDelegationTokenResp cancelResp = + client.CancelDelegationToken(cancelReq); + Utils.verifySuccess(cancelResp.getStatus()); + return; + } catch (TException e) { + throw new SQLException("Could not cancel token: " + + e.getMessage(), "08S01", e); + } + } + + public void renewDelegationToken(String tokenStr) throws SQLException { + TRenewDelegationTokenReq renewReq = new TRenewDelegationTokenReq(sessHandle, tokenStr); + try { + TRenewDelegationTokenResp renewResp = + client.RenewDelegationToken(renewReq); + Utils.verifySuccess(renewResp.getStatus()); + return; + } catch (TException e) { + throw new SQLException("Could not renew token: " + + e.getMessage(), "08S01", e); + } + } + + /* * (non-Javadoc) * diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java index 4d75d98..f5933ed 100644 --- jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -191,7 +191,7 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx JdbcConnectionParams connParams = new JdbcConnectionParams(); if (!uri.startsWith(URL_PREFIX)) { - throw new IllegalArgumentException("Bad URL format"); + throw new IllegalArgumentException("Bad URL format: Missing prefix " + URL_PREFIX); } // For URLs with no other configuration @@ -244,7 +244,9 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx if (sessVars != null) { Matcher sessMatcher = pattern.matcher(sessVars); while (sessMatcher.find()) { - connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)); + if 
(connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) { + throw new IllegalArgumentException("Bad URL format: Multiple values for property " + sessMatcher.group(1)); + } } } } diff --git service/if/TCLIService.thrift service/if/TCLIService.thrift index 62a9730..d3bf4ba 100644 --- service/if/TCLIService.thrift +++ service/if/TCLIService.thrift @@ -1005,6 +1005,52 @@ struct TFetchResultsResp { 3: optional TRowSet results } +// GetDelegationToken() +// Retrieve delegation token for the current user +struct TGetDelegationTokenReq { + // session handle + 1: required TSessionHandle sessionHandle + // userid for the proxy user + 2: required string owner + // designated renewer userid + 3: required string renewer +} + +struct TGetDelegationTokenResp { + // status of the request + 1: required TStatus status + // delegation token string + 2: optional string delegationToken +} + +// CancelDelegationToken() +// Cancel the given delegation token +struct TCancelDelegationTokenReq { + // session handle + 1: required TSessionHandle sessionHandle + // delegation token to cancel + 2: required string delegationToken +} + +struct TCancelDelegationTokenResp { + // status of the request + 1: required TStatus status +} + +// RenewDelegationToken() +// Renew the given delegation token +struct TRenewDelegationTokenReq { + // session handle + 1: required TSessionHandle sessionHandle + // delegation token to renew + 2: required string delegationToken +} + +struct TRenewDelegationTokenResp { + // status of the request + 1: required TStatus status +} + service TCLIService { TOpenSessionResp OpenSession(1:TOpenSessionReq req); @@ -1038,4 +1084,10 @@ service TCLIService { TGetResultSetMetadataResp GetResultSetMetadata(1:TGetResultSetMetadataReq req); TFetchResultsResp FetchResults(1:TFetchResultsReq req); + + TGetDelegationTokenResp GetDelegationToken(1:TGetDelegationTokenReq req); + + TCancelDelegationTokenResp CancelDelegationToken(1:TCancelDelegationTokenReq 
req); + + TRenewDelegationTokenResp RenewDelegationToken(1:TRenewDelegationTokenReq req); } diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index d80649f..be51aa7 100644 --- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge; +import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.thrift.TProcessorFactory; import org.apache.thrift.transport.TSSLTransportFactory; @@ -67,7 +68,10 @@ public String getAuthName() { private HadoopThriftAuthBridge.Server saslServer = null; private String authTypeStr; - HiveConf conf; + private final HiveConf conf; + + public static final String HS2_PROXY_USER = "hive.server2.proxy.user"; + public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken"; public HiveAuthFactory() throws TTransportException { conf = new HiveConf(); @@ -82,6 +86,14 @@ public HiveAuthFactory() throws TTransportException { conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB), conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL) ); + // start delegation token manager + try { + // + saslServer.startDelegationTokenSecretManager(conf, null); + } catch (IOException e) { + throw new TTransportException("Failed to start token manager", e); + } + } } @@ -145,6 +157,10 @@ public String getRemoteUser() { } } + public String getIpAddress() { + return saslServer != null ? 
saslServer.getRemoteAddress().toString() : null; + } + /* perform kerberos login using the hadoop shim API if the configuration is available */ public static void loginFromKeytab(HiveConf hiveConf) throws IOException { String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL); @@ -199,4 +215,45 @@ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, InetAddress.getByName(hiveHost), params); } + // retrieve delegation token for the given user + public String getDelegationToken(String owner, String renewer) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over kerberos authentication"); + } + + try { + return saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN); + } catch (IOException e) { + throw new HiveSQLException("Error retrieving delegation token for user " + owner, e); + } catch (InterruptedException e) { + throw new HiveSQLException("delegation token retrieval interrupted", e); + } + } + + // cancel given delegation token + public void cancelDelegationToken(String delegationToken) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over kerberos authentication"); + } + try { + saslServer.cancelDelegationToken(delegationToken); + } catch (IOException e) { + throw new HiveSQLException("Error canceling delegation token " + delegationToken, e); + } + } + + public void renewDelegationToken(String delegationToken) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over kerberos authentication"); + } + try { + saslServer.renewDelegationToken(delegationToken); + } catch (IOException e) { + throw new HiveSQLException("Error renewing delegation token " + delegationToken, e); + } + } + } diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java 
service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java index 519556c..93ec545 100644 --- service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java +++ service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java @@ -74,5 +74,17 @@ public static TTransport getKerberosTransport(String principal, String host, } } + public static TTransport getTokenTransport(String tokenStr, String host, + final TTransport underlyingTransport, Map saslProps) throws SaslException { + HadoopThriftAuthBridge.Client authBridge = + ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos"); + + try { + return authBridge.createClientTransport(null, host, + "DIGEST", tokenStr, underlyingTransport, saslProps); + } catch (IOException e) { + throw new SaslException("Failed to open client transport", e); + } + } } diff --git service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java index 15b1675..5a4519f 100644 --- service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java +++ service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java @@ -97,20 +97,16 @@ public void handle(Callback[] callbacks) private static class SQLPlainProcessorFactory extends TProcessorFactory { private final ThriftCLIService service; private final HiveConf conf; - private final boolean doAsEnabled; public SQLPlainProcessorFactory(ThriftCLIService service) { super(null); this.service = service; this.conf = service.getHiveConf(); - this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); } @Override public TProcessor getProcessor(TTransport trans) { - TProcessor baseProcessor = new TCLIService.Processor(service); - return doAsEnabled ? 
new TUGIContainingProcessor(baseProcessor, conf) : - new TSetIpAddressProcessor(service); + return new TSetIpAddressProcessor(service); } } diff --git service/src/java/org/apache/hive/service/cli/CLIService.java service/src/java/org/apache/hive/service/cli/CLIService.java index 8c85386..3262ac6 100644 --- service/src/java/org/apache/hive/service/cli/CLIService.java +++ service/src/java/org/apache/hive/service/cli/CLIService.java @@ -369,4 +369,28 @@ private void setupStagingDir(String dirPath, boolean isLocal) throws IOException fs.setPermission(scratchDir, fsPermission); } } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + String delegationToken = sessionManager.getSession(sessionHandle). + getDelegationToken(authFactory, owner, renewer); + LOG.info(sessionHandle + ": getDelegationToken()"); + return delegationToken; + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + sessionManager.getSession(sessionHandle). 
+ cancelDelegationToken(authFactory, tokenStr); + LOG.info(sessionHandle + ": cancelDelegationToken()"); + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, tokenStr); + LOG.info(sessionHandle + ": renewDelegationToken()"); + } } diff --git service/src/java/org/apache/hive/service/cli/CLIServiceClient.java service/src/java/org/apache/hive/service/cli/CLIServiceClient.java index 14ef54f..9e8542b 100644 --- service/src/java/org/apache/hive/service/cli/CLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/CLIServiceClient.java @@ -22,6 +22,8 @@ import java.util.List; import java.util.Map; +import org.apache.hive.service.auth.HiveAuthFactory; + /** * CLIServiceClient. @@ -158,4 +160,16 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000); } + @Override + public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException; + + @Override + public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + + @Override + public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + } diff --git service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java index 9dca874..8b960e2 100644 --- service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java @@ -21,6 +21,8 @@ import java.util.List; import java.util.Map; +import 
org.apache.hive.service.auth.HiveAuthFactory; + /** * EmbeddedCLIServiceClient. @@ -188,4 +190,22 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio return cliService.fetchResults(opHandle, orientation, maxRows); } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + return cliService.getDelegationToken(sessionHandle, authFactory, owner, renewer); + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + cliService.cancelDelegationToken(sessionHandle, authFactory, tokenStr); + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + cliService.renewDelegationToken(sessionHandle, authFactory, tokenStr); + } } diff --git service/src/java/org/apache/hive/service/cli/ICLIService.java service/src/java/org/apache/hive/service/cli/ICLIService.java index f647ce6..c4ebed5 100644 --- service/src/java/org/apache/hive/service/cli/ICLIService.java +++ service/src/java/org/apache/hive/service/cli/ICLIService.java @@ -23,6 +23,8 @@ +import org.apache.hive.service.auth.HiveAuthFactory; + public interface ICLIService { public abstract SessionHandle openSession(String username, String password, @@ -91,4 +93,14 @@ public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation o public abstract RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException; + public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException; + + public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + + public abstract void renewDelegationToken(SessionHandle 
sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + + } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSession.java service/src/java/org/apache/hive/service/cli/session/HiveSession.java index 00058cc..5d0b1b8 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSession.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSession.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; import org.apache.hive.service.cli.GetInfoValue; @@ -179,4 +180,13 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio public String getUserName(); public void setUserName(String userName); + + public String getDelegationToken(HiveAuthFactory authFactory, String owner, + String renewer) throws HiveSQLException; + + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException; + + public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException; } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index cfda752..7f13974 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.history.HiveHistory; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.common.util.HiveVersionInfo; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.FetchOrientation; import 
org.apache.hive.service.cli.GetInfoType; import org.apache.hive.service.cli.GetInfoValue; @@ -437,4 +438,22 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { protected HiveSession getSession() { return this; } + + @Override + public String getDelegationToken(HiveAuthFactory authFactory, String owner, String renewer) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } + + @Override + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } + + @Override + public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java index 708f4e4..fe0fe7d 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.HiveSQLException; /** @@ -146,5 +147,22 @@ public void setProxySession(HiveSession proxySession) { this.proxySession = proxySession; } + @Override + public String getDelegationToken(HiveAuthFactory authFactory, String owner, + String renewer) throws HiveSQLException { + return authFactory.getDelegationToken(owner, renewer); + } + + @Override + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws 
HiveSQLException { + authFactory.cancelDelegationToken(tokenStr); + } + + @Override + public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + authFactory.renewDelegationToken(tokenStr); + } } diff --git service/src/java/org/apache/hive/service/cli/session/SessionManager.java service/src/java/org/apache/hive/service/cli/session/SessionManager.java index e262b72..1febbb1 100644 --- service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -18,6 +18,7 @@ package org.apache.hive.service.cli.session; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -31,6 +32,9 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.hooks.HookUtils; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.CompositeService; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.SessionHandle; @@ -102,9 +106,6 @@ public SessionHandle openSession(String username, String password, Map sessionConf, boolean withImpersonation, String delegationToken) throws HiveSQLException { - if (username == null) { - username = threadLocalUserName.get(); - } HiveSession session; if (withImpersonation) { HiveSessionImplwithUGI hiveSessionUgi = new HiveSessionImplwithUGI(username, password, @@ -162,6 +163,10 @@ private void clearIpAddress() { threadLocalIpAddress.remove(); } + public static String getIpAddress() { + return threadLocalIpAddress.get(); + } + private static ThreadLocal threadLocalUserName = new ThreadLocal(){ @Override protected synchronized String initialValue() { @@ -177,6 +182,10 @@ private void clearUserName() { 
threadLocalUserName.remove(); } + public static String getUserName() { + return threadLocalUserName.get(); + } + // execute session hooks private void executeSessionHooks(HiveSession session) throws Exception { List sessionHooks = HookUtils.getHooks(hiveConf, @@ -191,3 +200,4 @@ private void executeSessionHooks(HiveSession session) throws Exception { } } + diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 9df110e..fd0ac96 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -29,6 +29,8 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.service.AbstractService; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.CLIService; @@ -41,6 +43,7 @@ import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.TableSchema; +import org.apache.hive.service.cli.session.SessionManager; import org.apache.thrift.TException; import org.apache.thrift.server.TServer; @@ -111,6 +114,77 @@ public synchronized void stop() { super.stop(); } + @Override + public TGetDelegationTokenResp GetDelegationToken(TGetDelegationTokenReq req) + throws TException { + TGetDelegationTokenResp resp = new TGetDelegationTokenResp(); + + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + String token = cliService.getDelegationToken( + new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getOwner(), req.getRenewer()); + if (token == null || token.isEmpty()) { + throw new HiveSQLException("Got empty 
token"); + } + resp.setDelegationToken(token); + resp.setStatus(OK_STATUS); + } catch (HiveSQLException e) { + e.printStackTrace(); + TStatus tokenErrorStatus = HiveSQLException.toTStatus(e); + tokenErrorStatus.setSqlState("42000"); + resp.setStatus(tokenErrorStatus); + } + } + return resp; + } + + @Override + public TCancelDelegationTokenResp CancelDelegationToken(TCancelDelegationTokenReq req) + throws TException { + TCancelDelegationTokenResp resp = new TCancelDelegationTokenResp(); + + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + cliService.cancelDelegationToken(new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getDelegationToken()); + } catch (HiveSQLException e) { + e.printStackTrace(); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + } + return resp; + } + + @Override + public TRenewDelegationTokenResp RenewDelegationToken(TRenewDelegationTokenReq req) + throws TException { + TRenewDelegationTokenResp resp = new TRenewDelegationTokenResp(); + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + cliService.renewDelegationToken(new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getDelegationToken()); + } catch (HiveSQLException e) { + e.printStackTrace(); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + } + return resp; + + } + + private TStatus unsecureTokenErrorStatus() { + TStatus errorStatus = new TStatus(TStatusCode.ERROR_STATUS); + errorStatus.setErrorMessage("Delegation token only supported over remote " + + "client with kerberos authentication"); + return errorStatus; + } @Override public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException { @@ -128,13 +202,25 @@ public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException { return resp; } - private String getUserName(TOpenSessionReq req) { + private String getIpAddress() { + if(hiveAuthFactory != null) { + return 
hiveAuthFactory.getIpAddress(); + } + return SessionManager.getIpAddress(); + } + + private String getUserName(TOpenSessionReq req) throws HiveSQLException { + String userName; if (hiveAuthFactory != null && hiveAuthFactory.getRemoteUser() != null) { - return hiveAuthFactory.getRemoteUser(); + userName = hiveAuthFactory.getRemoteUser(); } else { - return req.getUsername(); + userName = SessionManager.getUserName(); } + if (userName == null) { + userName = req.getUsername(); + } + return getProxyUser(userName, req.getConfiguration(), getIpAddress()); } SessionHandle getSessionHandle(TOpenSessionReq req) @@ -143,14 +229,8 @@ SessionHandle getSessionHandle(TOpenSessionReq req) String userName = getUserName(req); SessionHandle sessionHandle = null; - if ( - cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION) - .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString()) - && - cliService.getHiveConf(). - getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) - ) - { + if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) && + (userName != null)) { String delegationTokenStr = null; try { delegationTokenStr = cliService.getDelegationTokenFromMetaStore(userName); @@ -399,4 +479,48 @@ public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException { @Override public abstract void run(); + + /** + * If the proxy user name is provided then check privileges to substitute the user. 
+ * @param realUser + * @param sessionConf + * @param ipAddress + * @return + * @throws HiveSQLException + */ + private String getProxyUser(String realUser, Map sessionConf, + String ipAddress) throws HiveSQLException { + if (sessionConf == null || !sessionConf.containsKey(HiveAuthFactory.HS2_PROXY_USER)) { + return realUser; + } + + // Extract the proxy user name and check if we are allowed to do the substitution + String proxyUser = sessionConf.get(HiveAuthFactory.HS2_PROXY_USER); + if (!hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ALLOW_USER_SUBSTITUTION)) { + throw new HiveSQLException("Proxy user substitution is not allowed"); + } + + // If there's no authentication, then directly substitute the user + if (HiveAuthFactory.AuthTypes.NONE.toString(). + equalsIgnoreCase(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) { + return proxyUser; + } + + // Verify proxy user privilege of the realUser for the proxyUser + try { + UserGroupInformation sessionUgi; + if (!ShimLoader.getHadoopShims().isSecurityEnabled()) { + sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser); + } else { + sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(realUser, null); + } + ShimLoader.getHadoopShims(). 
+ authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf); + return proxyUser; + } catch (IOException e) { + throw new HiveSQLException("Failed to validate proxy privilage of " + realUser + + " for " + proxyUser, e); + } + } } + diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java index 9bb2a0f..692f0a6 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Map; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.CLIServiceClient; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; @@ -31,6 +32,7 @@ import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.TableSchema; +import org.apache.thrift.TException; /** * ThriftCLIServiceClient. 
@@ -388,4 +390,48 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { // TODO: set the correct default fetch size return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000); } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + TGetDelegationTokenReq req = new TGetDelegationTokenReq( + sessionHandle.toTSessionHandle(), owner, renewer); + try { + TGetDelegationTokenResp tokenResp = cliService.GetDelegationToken(req); + checkStatus(tokenResp.getStatus()); + return tokenResp.getDelegationToken(); + } catch (Exception e) { + throw new HiveSQLException(e); + } + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq( + sessionHandle.toTSessionHandle(), tokenStr); + try { + TCancelDelegationTokenResp cancelResp = + cliService.CancelDelegationToken(cancelReq); + checkStatus(cancelResp.getStatus()); + return; + } catch (TException e) { + throw new HiveSQLException(e); + } + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + TRenewDelegationTokenReq cancelReq = new TRenewDelegationTokenReq( + sessionHandle.toTSessionHandle(), tokenStr); + try { + TRenewDelegationTokenResp renewResp = + cliService.RenewDelegationToken(cancelReq); + checkStatus(renewResp.getStatus()); + return; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } } diff --git service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java index 8fa4afd..fb784aa 100644 --- service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java +++ 
service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java @@ -45,6 +45,6 @@ public void testDoAsSetting(){ tcliService.init(hconf); TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(tcliService); assertEquals("doAs enabled processor for unsecure mode", - procFactory.getProcessor(null).getClass(), TUGIContainingProcessor.class); + procFactory.getProcessor(null).getClass(), TSetIpAddressProcessor.class); } } diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java index 2fac800..a923988 100644 --- service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java +++ service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java @@ -19,12 +19,15 @@ package org.apache.hive.service.cli.session; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import junit.framework.Assert; import junit.framework.TestCase; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; @@ -34,7 +37,7 @@ public class TestSessionHooks extends TestCase { - public static final String SESSION_USER_NAME = "user1"; + public static String SESSION_USER_NAME = "user1"; private EmbeddedThriftBinaryCLIService service; private ThriftCLIServiceClient client; @@ -56,6 +59,7 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti @Before public void setUp() throws Exception { super.setUp(); + SessionHookTest.runCount.set(0); System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, TestSessionHooks.SessionHookTest.class.getName()); service = new EmbeddedThriftBinaryCLIService(); @@ -70,4 +74,19 @@ public void 
testSessionHook () throws Exception { Assert.assertEquals(1, SessionHookTest.runCount.get()); client.closeSession(sessionHandle); } + + /*** + * Create session with proxy user property. Verify the effective session user + * @throws Exception + */ + @Test + public void testProxyUser() throws Exception { + String connectingUser = "user1"; + String proxyUser = System.getProperty("user.name"); + MapsessConf = new HashMap(); + sessConf.put(HiveAuthFactory.HS2_PROXY_USER, proxyUser); + SESSION_USER_NAME = proxyUser; + SessionHandle sessionHandle = client.openSession(connectingUser, "foobar", sessConf); + client.closeSession(sessionHandle); + } } diff --git shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java index 6ff1a84..1181b00 100644 --- shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java +++ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java @@ -539,6 +539,12 @@ public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenS } @Override + public String addServiceToToken(String tokenStr, String tokenService) throws IOException { + throw new UnsupportedOperationException("Tokens are not supported in current hadoop version"); + } + + + @Override public T doAs(UserGroupInformation ugi, PrivilegedExceptionAction pvea) throws IOException, InterruptedException { try { @@ -620,6 +626,11 @@ public void remove() { } @Override + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException { + // This hadoop version doesn't have proxy verification + } + public boolean isSecurityEnabled() { return false; } diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java index 84f3ddc..0170873 100644 --- 
shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java +++ shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java @@ -63,6 +63,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenSelector; @@ -458,12 +459,39 @@ public String getTokenStrForm(String tokenSignature) throws IOException { return token != null ? token.encodeToUrlString() : null; } + /** + * Create a delegation token object for the given token string and service. + * Add the token to given UGI + */ @Override public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException { + Token delegationToken = createToken(tokenStr, tokenService); + ugi.addToken(delegationToken); + } + + /** + * Add a given service to delegation token string. 
+ */ + @Override + public String addServiceToToken(String tokenStr, String tokenService) + throws IOException { + Token delegationToken = createToken(tokenStr, tokenService); + return delegationToken.encodeToUrlString(); + } + + /** + * Create a new token using the given string and service + * @param tokenStr + * @param tokenService + * @return + * @throws IOException + */ + private Token createToken(String tokenStr, String tokenService) + throws IOException { Token delegationToken = new Token(); delegationToken.decodeFromUrlString(tokenStr); delegationToken.setService(new Text(tokenService)); - ugi.addToken(delegationToken); + return delegationToken; } @Override @@ -497,6 +525,13 @@ public UserGroupInformation createProxyUser(String userName) throws IOException } @Override + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException { + ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, realUserUgi), + ipAddress, conf); + } + + @Override public boolean isSecurityEnabled() { return UserGroupInformation.isSecurityEnabled(); } diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java index dc89de1..a31d624 100644 --- shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java +++ shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java @@ -43,6 +43,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport; import org.apache.hadoop.security.SaslRpcServer; import org.apache.hadoop.security.SaslRpcServer.AuthMethod; @@ -401,6 +402,13 @@ public String run() throws 
IOException { } @Override + public String getDelegationTokenWithService(String owner, String renewer, String service) + throws IOException, InterruptedException { + String token = getDelegationToken(owner, renewer); + return ShimLoader.getHadoopShims().addServiceToToken(token, service); + } + + @Override public long renewDelegationToken(String tokenStrForm) throws IOException { if (!authenticationMethod.get().equals(AuthenticationMethod.KERBEROS)) { throw new AuthorizationException( diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java index 0d5615c..97dec33 100644 --- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java +++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java @@ -228,7 +228,7 @@ public URI getHarUri(URI original, URI base, URI originalBase) * @return the string form of the token found * @throws IOException */ - String getTokenStrForm(String tokenSignature) throws IOException; + public String getTokenStrForm(String tokenSignature) throws IOException; /** * Add a delegation token to the given ugi @@ -237,9 +237,18 @@ public URI getHarUri(URI original, URI base, URI originalBase) * @param tokenService * @throws IOException */ - void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) + public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException; + /** + * Add given service to the string format token + * @param tokenStr + * @param tokenService + * @return + * @throws IOException + */ + public String addServiceToToken(String tokenStr, String tokenService) + throws IOException; enum JobTrackerState { INITIALIZING, RUNNING }; @@ -345,7 +354,14 @@ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration co * @param userName * @return */ - UserGroupInformation createProxyUser(String userName) throws IOException; + public 
UserGroupInformation createProxyUser(String userName) throws IOException; + + /** + * Verify proxy access to given UGI for given user + * @param ugi + */ + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException; /** * The method sets to set the partition file has a different signature between diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java index 03f4e51..5f9e951 100644 --- shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java +++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java @@ -91,9 +91,11 @@ public abstract TTransport createClientTransport( public abstract InetAddress getRemoteAddress(); public abstract void startDelegationTokenSecretManager(Configuration conf, Object hmsHandler) throws IOException; - public abstract String getRemoteUser(); public abstract String getDelegationToken(String owner, String renewer) - throws IOException, InterruptedException; + throws IOException, InterruptedException; + public abstract String getDelegationTokenWithService(String owner, String renewer, String service) + throws IOException, InterruptedException; + public abstract String getRemoteUser(); public abstract long renewDelegationToken(String tokenStrForm) throws IOException; public abstract void cancelDelegationToken(String tokenStrForm) throws IOException; }