diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index da9977e..96afd70 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -499,6 +499,7 @@ boolean initArgs(String[] args) { List commands = new LinkedList(); List files = new LinkedList(); String driver = null, user = null, pass = null, url = null, cmd = null; + String auth = null; for (int i = 0; i < args.length; i++) { if (args[i].equals("--help") || args[i].equals("-h")) { @@ -541,6 +542,9 @@ boolean initArgs(String[] args) { driver = args[i++ + 1]; } else if (args[i].equals("-n")) { user = args[i++ + 1]; + } else if (args[i].equals("-a")) { + auth = args[i++ + 1]; + getOpts().setAuthType(auth); } else if (args[i].equals("-p")) { pass = args[i++ + 1]; } else if (args[i].equals("-u")) { @@ -569,7 +573,8 @@ boolean initArgs(String[] args) { + url + " " + (user == null || user.length() == 0 ? "''" : user) + " " + (pass == null || pass.length() == 0 ? "''" : pass) + " " - + (driver == null ? "" : driver); + + (driver == null ? "" : driver) + " " + + (auth == null || auth.length() == 0 ? 
"''" : auth); debug("issuing: " + com); dispatch(com); } diff --git beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java index 0884b16..42abb86 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java +++ beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java @@ -82,6 +82,8 @@ private String historyFile = new File(saveDir(), "history").getAbsolutePath(); private String scriptFile = null; + private String authType = null; + private Map hiveVariables = new HashMap(); @@ -443,5 +445,13 @@ public void setAllowMultiLineCommand(boolean allowMultiLineCommand) { this.allowMultiLineCommand = allowMultiLineCommand; } + public String getAuthType() { + return authType; + } + + public void setAuthType(String authType) { + this.authType = authType; + } + } diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index 91ee3b3..88b3232 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -872,6 +872,7 @@ public boolean connect(String line) throws Exception { if (pass != null) { props.setProperty("password", pass); } + return connect(props); } @@ -918,6 +919,7 @@ public boolean connect(Properties props) throws IOException { "javax.jdo.option.ConnectionPassword", "ConnectionPassword", }); + String auth = getProperty(props, new String[] {"auth"}); if (url == null || url.length() == 0) { return beeLine.error("Property \"url\" is required"); @@ -933,14 +935,23 @@ public boolean connect(Properties props) throws IOException { if (username == null) { username = beeLine.getConsoleReader().readLine("Enter username for " + url + ": "); } + props.setProperty("user", username); if (password == null) { password = beeLine.getConsoleReader().readLine("Enter password for " + url + ": ", new Character('*')); } + props.setProperty("password", password); + + if (auth == null) { + auth = 
beeLine.getOpts().getAuthType(); + } + if (auth != null) { + props.setProperty("auth", auth); + } try { beeLine.getDatabaseConnections().setConnection( - new DatabaseConnection(beeLine, driver, url, username, password)); + new DatabaseConnection(beeLine, driver, url, props)); beeLine.getDatabaseConnection().getConnection(); beeLine.setCompletions(); diff --git beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java index 1de5829..3344208 100644 --- beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java +++ beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java @@ -31,6 +31,7 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Properties; import java.util.Map; import java.util.Set; import java.util.TreeSet; @@ -44,18 +45,16 @@ private DatabaseMetaData meta; private final String driver; private final String url; - private final String username; - private final String password; + private final Properties props; private Schema schema = null; private Completor sqlCompletor = null; public DatabaseConnection(BeeLine beeLine, String driver, String url, - String username, String password) throws SQLException { + Properties props) throws SQLException { this.beeLine = beeLine; this.driver = driver; - this.username = username; - this.password = password; + this.props = props; this.url = appendHiveVariables(beeLine, url); } @@ -157,7 +156,7 @@ boolean connect() throws SQLException { return beeLine.error(e); } - setConnection(DriverManager.getConnection(getUrl(), username, password)); + setConnection(DriverManager.getConnection(getUrl(), props)); setDatabaseMetaData(getConnection().getMetaData()); try { @@ -323,4 +322,4 @@ public Column(String name) { } } } -} \ No newline at end of file +} diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index bc9d630..d771622 
100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -763,6 +763,7 @@ // HiveServer2 auth configuration + HIVE_SERVER2_ALLOW_USER_SUBSTITUTION("hive.server2.allow.user.substitution", true), HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE"), HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""), HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""), diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index f155686..1717e3f 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -18,6 +18,7 @@ package org.apache.hive.jdbc; +import java.io.IOException; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; @@ -46,15 +47,23 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.auth.SaslQOP; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.TCLIService; +import org.apache.hive.service.cli.thrift.TCancelDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TCancelDelegationTokenResp; import org.apache.hive.service.cli.thrift.TCloseSessionReq; +import org.apache.hive.service.cli.thrift.TGetDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TGetDelegationTokenResp; import org.apache.hive.service.cli.thrift.TOpenSessionReq; import org.apache.hive.service.cli.thrift.TOpenSessionResp; import org.apache.hive.service.cli.thrift.TProtocolVersion; +import 
org.apache.hive.service.cli.thrift.TRenewDelegationTokenReq; +import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp; import org.apache.hive.service.cli.thrift.TSessionHandle; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.thrift.TException; @@ -73,11 +82,14 @@ private static final String HIVE_AUTH_TYPE= "auth"; private static final String HIVE_AUTH_QOP = "sasl.qop"; private static final String HIVE_AUTH_SIMPLE = "noSasl"; + private static final String HIVE_AUTH_TOKEN = "delegationToken"; private static final String HIVE_AUTH_USER = "user"; private static final String HIVE_AUTH_PRINCIPAL = "principal"; private static final String HIVE_AUTH_PASSWD = "password"; private static final String HIVE_ANONYMOUS_USER = "anonymous"; private static final String HIVE_ANONYMOUS_PASSWD = "anonymous"; + public static final String HIVE_CONF_TOKEN = "hive.server2.delegation.token"; + private final String jdbcURI; private final String host; private final int port; @@ -95,7 +107,12 @@ public HiveConnection(String uri, Properties info) throws SQLException { jdbcURI = uri; // parse the connection uri - Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI); + Utils.JdbcConnectionParams connParams; + try { + connParams = Utils.parseURL(uri); + } catch (IllegalArgumentException e) { + throw new SQLException(e); + } // extract parsed connection parameters: // JDBC URL: jdbc:hive2://:/dbName;sess_var_list?hive_conf_list#hive_var_list // each list: =;= and so on @@ -112,13 +129,17 @@ public HiveConnection(String uri, Properties info) throws SQLException { if (isEmbeddedMode) { client = new EmbeddedThriftBinaryCLIService(); } else { - // extract user/password from JDBC connection properties if its not supplied in the connection URL + // extract user/password from JDBC connection properties if its not supplied in the + // connection URL if (info.containsKey(HIVE_AUTH_USER)) { sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER)); if 
(info.containsKey(HIVE_AUTH_PASSWD)) { sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD)); } } + if (info.containsKey(HIVE_AUTH_TYPE)) { + sessConfMap.put(HIVE_AUTH_TYPE, info.getProperty(HIVE_AUTH_TYPE)); + } // open the client transport openTransport(); } @@ -129,7 +150,7 @@ public HiveConnection(String uri, Properties info) throws SQLException { supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3); // open client session - openSession(); + openSession(connParams.getSessionVars()); configureConnection(); } @@ -179,34 +200,39 @@ private TTransport createHttpTransport() throws SQLException { private TTransport createBinaryTransport() throws SQLException { transport = new TSocket(host, port); // handle secure connection if specified - if (!sessConfMap.containsKey(HIVE_AUTH_TYPE) - || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) { + if (!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))) { + Map saslProps = new HashMap(); + SaslQOP saslQOP = SaslQOP.AUTH; + if(sessConfMap.containsKey(HIVE_AUTH_QOP)) { + try { + saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP)); + } catch (IllegalArgumentException e) { + throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e); + } + } + saslProps.put(Sasl.QOP, saslQOP.toString()); + saslProps.put(Sasl.SERVER_AUTH, "true"); try { // If Kerberos if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) { - Map saslProps = new HashMap(); - SaslQOP saslQOP = SaslQOP.AUTH; - if(sessConfMap.containsKey(HIVE_AUTH_QOP)) { - try { - saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP)); - } catch (IllegalArgumentException e) { - throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. 
" + e.getMessage(), "42000", e); - } - } - saslProps.put(Sasl.QOP, saslQOP.toString()); - saslProps.put(Sasl.SERVER_AUTH, "true"); transport = KerberosSaslHelper.getKerberosTransport( sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps); } else { - String userName = sessConfMap.get(HIVE_AUTH_USER); - if ((userName == null) || userName.isEmpty()) { - userName = HIVE_ANONYMOUS_USER; - } - String passwd = sessConfMap.get(HIVE_AUTH_PASSWD); - if ((passwd == null) || passwd.isEmpty()) { - passwd = HIVE_ANONYMOUS_PASSWD; + String tokenStr = getClientDelegationToken(sessConfMap); + if (tokenStr != null) { + transport = KerberosSaslHelper.getTokenTransport(tokenStr, + host, transport, saslProps); + } else { + String userName = sessConfMap.get(HIVE_AUTH_USER); + if ((userName == null) || userName.isEmpty()) { + userName = HIVE_ANONYMOUS_USER; + } + String passwd = sessConfMap.get(HIVE_AUTH_PASSWD); + if ((passwd == null) || passwd.isEmpty()) { + passwd = HIVE_ANONYMOUS_PASSWD; + } + transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport); } - transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport); } } catch (SaslException e) { throw new SQLException("Could not create secure connection to " @@ -227,11 +253,31 @@ private boolean isHttpTransportMode() { return false; } - private void openSession() throws SQLException { + // Lookup the delegation token. 
First in the connection URL, then Configuration + private String getClientDelegationToken(Map jdbcConnConf) + throws SQLException { + String tokenStr = null; + if (HIVE_AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(HIVE_AUTH_TYPE))) { + // check delegation token in job conf if any + try { + tokenStr = ShimLoader.getHadoopShims().getTokenStrForm(HIVE_CONF_TOKEN); + } catch (IOException e) { + throw new SQLException("Error reading token ", e); + } + } + return tokenStr; + } + + private void openSession(Map sessVars) throws SQLException { TOpenSessionReq openReq = new TOpenSessionReq(); // set the session configuration - // openReq.setConfiguration(null); + if (sessVars.containsKey(HiveAuthFactory.HS2_PROXY_USER)) { + Map openConf = new HashMap(); + openConf.put(HiveAuthFactory.HS2_PROXY_USER, + sessVars.get(HiveAuthFactory.HS2_PROXY_USER)); + openReq.setConfiguration(openConf); + } try { TOpenSessionResp openResp = client.OpenSession(openReq); @@ -304,6 +350,44 @@ public void abort(Executor executor) throws SQLException { throw new SQLException("Method not supported"); } + public String getDelegationToken(String owner, String renewer) throws SQLException { + TGetDelegationTokenReq req = new TGetDelegationTokenReq(sessHandle, owner, renewer); + try { + TGetDelegationTokenResp tokenResp = client.GetDelegationToken(req); + Utils.verifySuccess(tokenResp.getStatus()); + return tokenResp.getDelegationToken(); + } catch (TException e) { + throw new SQLException("Could not retrieve token: " + + e.getMessage(), "08S01", e); + } + } + + public void cancelDelegationToken(String tokenStr) throws SQLException { + TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq(sessHandle, tokenStr); + try { + TCancelDelegationTokenResp cancelResp = + client.CancelDelegationToken(cancelReq); + Utils.verifySuccess(cancelResp.getStatus()); + return; + } catch (TException e) { + throw new SQLException("Could not cancel token: " + + e.getMessage(), "08S01", e); + } + } + + public 
void renewDelegationToken(String tokenStr) throws SQLException { + TRenewDelegationTokenReq cancelReq = new TRenewDelegationTokenReq(sessHandle, tokenStr); + try { + TRenewDelegationTokenResp renewResp = + client.RenewDelegationToken(cancelReq); + Utils.verifySuccess(renewResp.getStatus()); + return; + } catch (TException e) { + throw new SQLException("Could not renew token: " + + e.getMessage(), "08S01", e); + } + } + + /* * (non-Javadoc) * diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java index 45de290..5ba758e 100644 --- jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -189,7 +189,7 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx JdbcConnectionParams connParams = new JdbcConnectionParams(); if (!uri.startsWith(URL_PREFIX)) { - throw new IllegalArgumentException("Bad URL format"); + throw new IllegalArgumentException("Bad URL format: Missing prefix " + URL_PREFIX); } // For URLs with no other configuration @@ -242,7 +242,9 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx if (sessVars != null) { Matcher sessMatcher = pattern.matcher(sessVars); while (sessMatcher.find()) { - connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)); + if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) { + throw new IllegalArgumentException("Bad URL format: Multiple values for property " + sessMatcher.group(1)); + } } } } diff --git jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java index ddfc087..d21a761 100644 --- jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java +++ jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -217,7 +217,7 @@ private void checkBadUrl(String url) throws SQLException { try{ DriverManager.getConnection(url, "", ""); fail("should have thrown 
IllegalArgumentException but did not "); - }catch(IllegalArgumentException i){ + }catch(SQLException i){ assertTrue(i.getMessage().contains("Bad URL format. Hostname not found " + " in authority part of the url")); } diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index 5a66a6c..3fca642 100644 --- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge; +import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.thrift.TProcessorFactory; import org.apache.thrift.transport.TTransportException; @@ -61,7 +62,9 @@ public String getAuthName() { private HadoopThriftAuthBridge.Server saslServer = null; private String authTypeStr; - HiveConf conf; + private final HiveConf conf; + + public static final String HS2_PROXY_USER = "hive.server2.proxy.user"; public HiveAuthFactory() throws TTransportException { conf = new HiveConf(); @@ -76,6 +79,14 @@ public HiveAuthFactory() throws TTransportException { conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB), conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL) ); + // start delegation token manager + try { + // + saslServer.startDelegationTokenSecretManager(conf, null); + } catch (IOException e) { + throw new TTransportException("Failed to start token manager", e); + } + } } @@ -153,4 +164,46 @@ public static void loginFromKeytab(HiveConf hiveConf) throws IOException { } } + // retrieve delegation token for the given user + public String getDelegationToken(String owner, String renewer) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over 
kerberos authentication"); + } + + try { + return saslServer.getDelegationToken(owner, renewer); + } catch (IOException e) { + throw new HiveSQLException("Error retrieving delegation token for user " + owner, e); + } catch (InterruptedException e) { + throw new HiveSQLException("delegation token retrieval interrupted", e); + } + } + + // cancel given delegation token + public void cancelDelegationToken(String delegationToken) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over kerberos authentication"); + } + try { + saslServer.cancelDelegationToken(delegationToken); + } catch (IOException e) { + throw new HiveSQLException("Error canceling delegation token " + delegationToken, e); + } + } + + public void renewDelegationToken(String delegationToken) throws HiveSQLException { + if (saslServer == null) { + throw new HiveSQLException( + "Delegation token only supported over kerberos authentication"); + } + try { + saslServer.renewDelegationToken(delegationToken); + } catch (IOException e) { + throw new HiveSQLException("Error renewing delegation token " + delegationToken, e); + } + + } + } diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java index 519556c..93ec545 100644 --- service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java +++ service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java @@ -74,5 +74,17 @@ public static TTransport getKerberosTransport(String principal, String host, } } + public static TTransport getTokenTransport(String tokenStr, String host, + final TTransport underlyingTransport, Map saslProps) throws SaslException { + HadoopThriftAuthBridge.Client authBridge = + ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos"); + + try { + return authBridge.createClientTransport(null, host, + "DIGEST", tokenStr, underlyingTransport, saslProps); + } 
catch (IOException e) { + throw new SaslException("Failed to open client transport", e); + } + } } diff --git service/src/java/org/apache/hive/service/cli/CLIService.java service/src/java/org/apache/hive/service/cli/CLIService.java index 1a7f338..79b0b06 100644 --- service/src/java/org/apache/hive/service/cli/CLIService.java +++ service/src/java/org/apache/hive/service/cli/CLIService.java @@ -341,4 +341,28 @@ public synchronized String getDelegationTokenFromMetaStore(String owner) } } } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + String delegationToken = sessionManager.getSession(sessionHandle). + getDelegationToken(authFactory, owner, renewer); + LOG.info(sessionHandle + ": getDelegationToken()"); + return delegationToken; + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + sessionManager.getSession(sessionHandle). + cancelDelegationToken(authFactory, tokenStr); + LOG.info(sessionHandle + ": cancelDelegationToken()"); + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, tokenStr); + LOG.info(sessionHandle + ": renewDelegationToken()"); + } } diff --git service/src/java/org/apache/hive/service/cli/CLIServiceClient.java service/src/java/org/apache/hive/service/cli/CLIServiceClient.java index 14ef54f..9e8542b 100644 --- service/src/java/org/apache/hive/service/cli/CLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/CLIServiceClient.java @@ -22,6 +22,8 @@ import java.util.List; import java.util.Map; +import org.apache.hive.service.auth.HiveAuthFactory; + /** * CLIServiceClient. 
@@ -158,4 +160,16 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000); } + @Override + public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException; + + @Override + public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + + @Override + public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + } diff --git service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java index 9dca874..8b960e2 100644 --- service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java @@ -21,6 +21,8 @@ import java.util.List; import java.util.Map; +import org.apache.hive.service.auth.HiveAuthFactory; + /** * EmbeddedCLIServiceClient. 
@@ -188,4 +190,22 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio return cliService.fetchResults(opHandle, orientation, maxRows); } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + return cliService.getDelegationToken(sessionHandle, authFactory, owner, renewer); + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + cliService.cancelDelegationToken(sessionHandle, authFactory, tokenStr); + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + cliService.renewDelegationToken(sessionHandle, authFactory, tokenStr); + } } diff --git service/src/java/org/apache/hive/service/cli/ICLIService.java service/src/java/org/apache/hive/service/cli/ICLIService.java index f647ce6..c4ebed5 100644 --- service/src/java/org/apache/hive/service/cli/ICLIService.java +++ service/src/java/org/apache/hive/service/cli/ICLIService.java @@ -23,6 +23,8 @@ +import org.apache.hive.service.auth.HiveAuthFactory; + public interface ICLIService { public abstract SessionHandle openSession(String username, String password, @@ -91,4 +93,14 @@ public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation o public abstract RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException; + public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException; + + public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException; + + public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws 
HiveSQLException; + + } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSession.java service/src/java/org/apache/hive/service/cli/session/HiveSession.java index 00058cc..5d0b1b8 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSession.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSession.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; import org.apache.hive.service.cli.GetInfoValue; @@ -179,4 +180,13 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio public String getUserName(); public void setUserName(String userName); + + public String getDelegationToken(HiveAuthFactory authFactory, String owner, + String renewer) throws HiveSQLException; + + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException; + + public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException; } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 11c96b2..1dde7fc 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.history.HiveHistory; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.common.util.HiveVersionInfo; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; import org.apache.hive.service.cli.GetInfoValue; @@ -394,4 +395,22 
@@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { protected HiveSession getSession() { return this; } + + @Override + public String getDelegationToken(HiveAuthFactory authFactory, String owner, String renewer) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } + + @Override + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } + + @Override + public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + throw new HiveSQLException("Delegation token access is only allowed with impersonation"); + } } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java index ae7bb6b..0a8628d 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.HiveSQLException; /** @@ -163,5 +164,22 @@ public void setProxySession(HiveSession proxySession) { this.proxySession = proxySession; } + @Override + public String getDelegationToken(HiveAuthFactory authFactory, String owner, + String renewer) throws HiveSQLException { + return authFactory.getDelegationToken(owner, renewer); + } + + @Override + public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + authFactory.cancelDelegationToken(tokenStr); + } + + @Override + public void 
renewDelegationToken(HiveAuthFactory authFactory, String tokenStr) + throws HiveSQLException { + authFactory.renewDelegationToken(tokenStr); + } } diff --git service/src/java/org/apache/hive/service/cli/session/SessionManager.java service/src/java/org/apache/hive/service/cli/session/SessionManager.java index f392d62..f969d50 100644 --- service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -18,6 +18,7 @@ package org.apache.hive.service.cli.session; +import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -31,6 +32,9 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.hooks.HookUtils; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.CompositeService; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.SessionHandle; @@ -96,6 +100,7 @@ public SessionHandle openSession(String username, String password, Map sessionConf, String ipAddress) + throws HiveSQLException { + if (sessionConf == null || !sessionConf.containsKey(HiveAuthFactory.HS2_PROXY_USER)) { + return realUser; + } + + // Extract the proxy user name and check if we are allowed to do the substitution + String proxyUser = sessionConf.get(HiveAuthFactory.HS2_PROXY_USER); + if (!hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ALLOW_USER_SUBSTITUTION)) { + throw new HiveSQLException("Proxy user substitution is not allowed"); + } + if (!ShimLoader.getHadoopShims().isSecurityEnabled()) { + throw new HiveSQLException("Proxy user substitution is not supported for unsecure hadoop"); + } + + // Verify proxy user privilege of the realUser for the proxyUser + try { + UserGroupInformation sessionUgi = 
ShimLoader.getHadoopShims().createProxyUser(realUser); + ShimLoader.getHadoopShims(). + authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf); + return proxyUser; + } catch (IOException e) { + throw new HiveSQLException("Failed to validate proxy privilege of " + realUser + + " for " + proxyUser, e); + } + } } + diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index faecf93..71384b7 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -111,6 +111,77 @@ public synchronized void stop() { super.stop(); } + @Override + public TGetDelegationTokenResp GetDelegationToken(TGetDelegationTokenReq req) + throws TException { + TGetDelegationTokenResp resp = new TGetDelegationTokenResp(); + + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + String token = cliService.getDelegationToken( + new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getOwner(), req.getRenewer()); + if (token == null || token.isEmpty()) { + throw new HiveSQLException("Got empty token"); + } + resp.setDelegationToken(token); + resp.setStatus(OK_STATUS); + } catch (HiveSQLException e) { + e.printStackTrace(); + TStatus tokenErrorStatus = HiveSQLException.toTStatus(e); + tokenErrorStatus.setSqlState("42000"); + resp.setStatus(tokenErrorStatus); + } + } + return resp; + } + + @Override + public TCancelDelegationTokenResp CancelDelegationToken(TCancelDelegationTokenReq req) + throws TException { + TCancelDelegationTokenResp resp = new TCancelDelegationTokenResp(); + + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + cliService.cancelDelegationToken(new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getDelegationToken()); + } catch (HiveSQLException e) { + 
e.printStackTrace(); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + } + return resp; + } + + @Override + public TRenewDelegationTokenResp RenewDelegationToken(TRenewDelegationTokenReq req) + throws TException { + TRenewDelegationTokenResp resp = new TRenewDelegationTokenResp(); + if (hiveAuthFactory == null) { + resp.setStatus(unsecureTokenErrorStatus()); + } else { + try { + cliService.renewDelegationToken(new SessionHandle(req.getSessionHandle()), + hiveAuthFactory, req.getDelegationToken()); + } catch (HiveSQLException e) { + e.printStackTrace(); + resp.setStatus(HiveSQLException.toTStatus(e)); + } + } + return resp; + + } + + private TStatus unsecureTokenErrorStatus() { + TStatus errorStatus = new TStatus(TStatusCode.ERROR_STATUS); + errorStatus.setErrorMessage("Delegation token only supported over remote " + + "client with kerberos authentication"); + return errorStatus; + } @Override public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException { diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java index 9bb2a0f..692f0a6 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Map; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.cli.CLIServiceClient; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; @@ -31,6 +32,7 @@ import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.TableSchema; +import org.apache.thrift.TException; /** * ThriftCLIServiceClient. 
@@ -388,4 +390,48 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException { // TODO: set the correct default fetch size return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000); } + + @Override + public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String owner, String renewer) throws HiveSQLException { + TGetDelegationTokenReq req = new TGetDelegationTokenReq( + sessionHandle.toTSessionHandle(), owner, renewer); + try { + TGetDelegationTokenResp tokenResp = cliService.GetDelegationToken(req); + checkStatus(tokenResp.getStatus()); + return tokenResp.getDelegationToken(); + } catch (Exception e) { + throw new HiveSQLException(e); + } + } + + @Override + public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq( + sessionHandle.toTSessionHandle(), tokenStr); + try { + TCancelDelegationTokenResp cancelResp = + cliService.CancelDelegationToken(cancelReq); + checkStatus(cancelResp.getStatus()); + return; + } catch (TException e) { + throw new HiveSQLException(e); + } + } + + @Override + public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory, + String tokenStr) throws HiveSQLException { + TRenewDelegationTokenReq cancelReq = new TRenewDelegationTokenReq( + sessionHandle.toTSessionHandle(), tokenStr); + try { + TRenewDelegationTokenResp renewResp = + cliService.RenewDelegationToken(cancelReq); + checkStatus(renewResp.getStatus()); + return; + } catch (Exception e) { + throw new HiveSQLException(e); + } + } } diff --git shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java index f57f09e..6b20251 100644 --- shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java +++ 
shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java @@ -666,6 +666,11 @@ public void remove() { } @Override + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException { + // This hadoop version doesn't have proxy verification + } + public boolean isSecurityEnabled() { return false; } diff --git shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java index 28843e0..56bfd0f 100644 --- shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java +++ shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java @@ -63,6 +63,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenSelector; @@ -553,6 +554,13 @@ public UserGroupInformation createProxyUser(String userName) throws IOException } @Override + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException { + ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, realUserUgi), + ipAddress, conf); + } + + @Override public boolean isSecurityEnabled() { return UserGroupInformation.isSecurityEnabled(); } diff --git shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java index e7aa79e..b095ff0 100644 --- shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java +++ shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java @@ -402,7 +402,14 @@ public boolean 
moveToAppropriateTrash(FileSystem fs, Path path, Configuration co * @param userName * @return */ - UserGroupInformation createProxyUser(String userName) throws IOException; + public UserGroupInformation createProxyUser(String userName) throws IOException; + + /** + * Verify proxy access to given UGI for given user + * @param ugi + */ + public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi, + String ipAddress, Configuration conf) throws IOException; /** * The method sets to set the partition file has a different signature between