diff --git beeline/pom.xml beeline/pom.xml
index 7449430..f47ca65 100644
--- beeline/pom.xml
+++ beeline/pom.xml
@@ -145,6 +145,19 @@
    <sourceDirectory>${basedir}/src/java</sourceDirectory>
    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 563d242..9afa5af 100644
--- beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -502,6 +502,7 @@ boolean initArgs(String[] args) {
    List<String> commands = new LinkedList<String>();
    List<String> files = new LinkedList<String>();
String driver = null, user = null, pass = null, url = null, cmd = null;
+ String auth = null;
for (int i = 0; i < args.length; i++) {
if (args[i].equals("--help") || args[i].equals("-h")) {
@@ -554,6 +555,9 @@ boolean initArgs(String[] args) {
driver = args[i++ + 1];
} else if (args[i].equals("-n")) {
user = args[i++ + 1];
+ } else if (args[i].equals("-a")) {
+ auth = args[i++ + 1];
+ getOpts().setAuthType(auth);
} else if (args[i].equals("-p")) {
pass = args[i++ + 1];
} else if (args[i].equals("-u")) {
diff --git beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
index 91e20ec..44cabdf 100644
--- beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
+++ beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java
@@ -86,6 +86,8 @@
private String historyFile = new File(saveDir(), "history").getAbsolutePath();
private String scriptFile = null;
+ private String authType = null;
+
  private Map<String, String> hiveVariables = new HashMap<String, String>();
  private Map<String, String> hiveConfVariables = new HashMap<String, String>();
@@ -248,6 +250,13 @@ public void setFastConnect(boolean fastConnect) {
this.fastConnect = fastConnect;
}
+ public String getAuthType() {
+ return authType;
+ }
+
+ public void setAuthType(String authType) {
+ this.authType = authType;
+ }
public boolean getFastConnect() {
return fastConnect;
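
Together, the BeeLine.java and BeeLineOpts.java changes above thread a new "-a <authType>" command-line option into the connection options. A minimal sketch of an invocation that exercises it (URL and credentials are placeholders, not part of the patch):

    String[] args = { "-u", "jdbc:hive2://host:10000/default",
        "-n", "foo", "-p", "bar", "-a", "delegationToken" };
    BeeLine.main(args);  // getOpts().getAuthType() now returns "delegationToken"
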
diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java
index d2d7fd3..8694864 100644
--- beeline/src/java/org/apache/hive/beeline/Commands.java
+++ beeline/src/java/org/apache/hive/beeline/Commands.java
@@ -876,6 +876,7 @@ public boolean connect(String line) throws Exception {
if (pass != null) {
props.setProperty("password", pass);
}
+
return connect(props);
}
@@ -922,6 +923,7 @@ public boolean connect(Properties props) throws IOException {
"javax.jdo.option.ConnectionPassword",
"ConnectionPassword",
});
+ String auth = getProperty(props, new String[] {"auth"});
if (url == null || url.length() == 0) {
return beeLine.error("Property \"url\" is required");
@@ -937,14 +939,23 @@ public boolean connect(Properties props) throws IOException {
if (username == null) {
username = beeLine.getConsoleReader().readLine("Enter username for " + url + ": ");
}
+ props.setProperty("user", username);
if (password == null) {
password = beeLine.getConsoleReader().readLine("Enter password for " + url + ": ",
new Character('*'));
}
+ props.setProperty("password", password);
+
+ if (auth == null) {
+ auth = beeLine.getOpts().getAuthType();
+ }
+ if (auth != null) {
+ props.setProperty("auth", auth);
+ }
try {
beeLine.getDatabaseConnections().setConnection(
- new DatabaseConnection(beeLine, driver, url, username, password));
+ new DatabaseConnection(beeLine, driver, url, props));
beeLine.getDatabaseConnection().getConnection();
beeLine.setCompletions();
@@ -1171,8 +1182,8 @@ private boolean stopRecording(String line) {
} catch (Exception e) {
beeLine.handleException(e);
}
- beeLine.output(beeLine.loc("record-closed", beeLine.getRecordOutputFile()));
beeLine.setRecordOutputFile(null);
+ beeLine.output(beeLine.loc("record-closed", beeLine.getRecordOutputFile()));
return true;
}
@@ -1191,8 +1202,9 @@ private boolean startRecording(String line) {
}
try {
- beeLine.setRecordOutputFile(new OutputFile(parts[1]));
- beeLine.output(beeLine.loc("record-started", beeLine.getRecordOutputFile()));
+ OutputFile recordOutput = new OutputFile(parts[1]);
+ beeLine.output(beeLine.loc("record-started", recordOutput));
+ beeLine.setRecordOutputFile(recordOutput);
return true;
} catch (Exception e) {
return beeLine.error(e);
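
With the Commands.java changes above, !connect now hands the driver a single Properties object carrying the resolved user, password, and auth type (falling back to the -a value stored in BeeLineOpts). A rough sketch of the properties that reach DatabaseConnection, with placeholder values:

    Properties props = new Properties();
    props.setProperty("url", "jdbc:hive2://host:10000/default");
    props.setProperty("driver", "org.apache.hive.jdbc.HiveDriver");
    props.setProperty("user", "foo");
    props.setProperty("password", "bar");
    props.setProperty("auth", "delegationToken"); // only set when token auth is requested
    DatabaseConnection conn = new DatabaseConnection(beeLine, driver, url, props);
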
diff --git beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
index 94178ef..00b49af 100644
--- beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
+++ beeline/src/java/org/apache/hive/beeline/DatabaseConnection.java
@@ -30,6 +30,7 @@
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@@ -49,19 +50,17 @@
private DatabaseMetaData meta;
private final String driver;
private final String url;
- private final String username;
- private final String password;
+ private final Properties info;
private Schema schema = null;
private Completor sqlCompletor = null;
public DatabaseConnection(BeeLine beeLine, String driver, String url,
- String username, String password) throws SQLException {
+ Properties info) throws SQLException {
this.beeLine = beeLine;
this.driver = driver;
- this.username = username;
- this.password = password;
this.url = url;
+ this.info = info;
}
@Override
@@ -133,9 +132,6 @@ boolean connect() throws SQLException {
return beeLine.error(e);
}
- Properties info = new Properties();
- info.put(HIVE_AUTH_USER, username);
- info.put(HIVE_AUTH_PASSWD, password);
    Map<String, String> hiveVars = beeLine.getOpts().getHiveVariables();
    for (Map.Entry<String, String> var : hiveVars.entrySet()) {
info.put(HIVE_VAR_PREFIX + var.getKey(), var.getValue());
@@ -312,4 +308,4 @@ public Column(String name) {
}
}
}
-}
\ No newline at end of file
+}
diff --git beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
new file mode 100644
index 0000000..95146e9
--- /dev/null
+++ beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
@@ -0,0 +1,382 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.beeline;
+
+import java.io.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.beeline.BeeLine;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.service.auth.HiveAuthFactory;
+
+/**
+ * Simple client application to test various direct and proxy connections to HiveServer2.
+ * Note that it's not an automated test at this point. It requires a manually configured
+ * secure HiveServer2. It also requires a super user and a normal user principal.
+ * Steps to run the test -
+ *   kinit
+ *   hive --service jar beeline/target/hive-beeline-0.13.0-SNAPSHOT-tests.jar \
+ *     org.apache.hive.beeline.ProxyAuthTest \
+ *     <host> <port> <server_principal> <proxy_user> [testTab]
+ */
+public class ProxyAuthTest {
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+ private static final String BEELINE_EXIT = "beeline.system.exit";
+ private static Connection con = null;
+ private static boolean noClose = false;
+ private static String tabName = "jdbc_test";
+ private static String tabDataFileName;
+ private static String scriptFileName;
+  private static String[] dmlStmts;
+  private static String[] dfsStmts;
+  private static String[] selectStmts;
+  private static String[] cleanUpStmts;
+  private static InputStream inpStream = null;
+  private static int tabCount = 1;
+  private static File resultFile = null;
+
+ public static void main(String[] args) throws Exception {
+ if (args.length < 4) {
+      System.out.println("Usage ProxyAuthTest <host> <port> <server_principal> <proxy_user> [testTab]");
+ System.exit(1);
+ }
+
+ File currentResultFile = null;
+    String[] beeLineArgs = {};
+
+ Class.forName(driverName);
+ String host = args[0];
+ String port = args[1];
+ String serverPrincipal = args[2];
+ String proxyUser = args[3];
+ String url = null;
+ if (args.length > 4) {
+ tabName = args[4];
+ }
+
+ generateData();
+ generateSQL(null);
+
+ try {
+ /*
+ * Connect via kerberos and get delegation token
+ */
+ url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
+ con = DriverManager.getConnection(url);
+ System.out.println("Connected successfully to " + url);
+ // get delegation token for the given proxy user
+ String token = ((HiveConnection)con).getDelegationToken(proxyUser, serverPrincipal);
+ if ("true".equals(System.getProperty("proxyAuth.debug", "false"))) {
+ System.out.println("Got token: " + token);
+ }
+ con.close();
+
+ // so that beeline won't kill the JVM
+ System.setProperty(BEELINE_EXIT, "true");
+
+ // connect using principal via Beeline with inputStream
+ url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
+ currentResultFile = generateSQL(null);
+ beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar"};
+      System.out.println("Connection with kerberos, user/password via args, using input redirection");
+      BeeLine.mainWithInputRedirection(beeLineArgs, inpStream);
+      compareResults(currentResultFile);
+
+      // connect using principal via Beeline with script file
+ url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
+ currentResultFile = generateSQL(null);
+ beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-f" , scriptFileName};
+ System.out.println("Connection with kerberos, user/password via args, using input script");
+ BeeLine.main(beeLineArgs);
+      compareResults(currentResultFile);
+
+      // connect with url and credentials via !connect in the script
+      url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
+      currentResultFile = generateSQL(url + " foo bar ");
+      beeLineArgs = new String[] { "-u", url, "-f", scriptFileName };
+      System.out.println("Connection with kerberos, user/password via connect, using input script");
+      BeeLine.main(beeLineArgs);
+      compareResults(currentResultFile);
+
+      // connect with url and credentials via !connect in the script, driven through input redirection
+      url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal;
+      currentResultFile = generateSQL(url + " foo bar ");
+      beeLineArgs = new String[] { "-u", url, "-f", scriptFileName };
+      System.out.println("Connection with kerberos, user/password via connect, using input redirect");
+      BeeLine.mainWithInputRedirection(beeLineArgs, inpStream);
+      compareResults(currentResultFile);
+
+ /*
+ * Connect using the delegation token passed via configuration object
+ */
+ System.out.println("Store token into ugi and try");
+ storeTokenInJobConf(token);
+ url = "jdbc:hive2://" + host + ":" + port + "/default;auth=delegationToken";
+      System.out.println("Connecting to " + url);
+      con = DriverManager.getConnection(url);
+ runTest();
+ con.close();
+
+ // connect using token via Beeline with inputStream
+ url = "jdbc:hive2://" + host + ":" + port + "/default";
+ currentResultFile = generateSQL(null);
+ beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a", "delegationToken" };
+ System.out.println("Connection with token, user/password via args, using input redirection");
+ BeeLine.mainWithInputRedirection(beeLineArgs, inpStream);
+      compareResults(currentResultFile);
+
+ // connect using token via Beeline using script
+ url = "jdbc:hive2://" + host + ":" + port + "/default";
+ currentResultFile = generateSQL(null);
+      beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a", "delegationToken",
+          "-f", scriptFileName };
+      System.out.println("Connection with token, user/password via args, using input script");
+      BeeLine.main(beeLineArgs);
+      compareResults(currentResultFile);
+
+ // connect using token via Beeline using script
+ url = "jdbc:hive2://" + host + ":" + port + "/default";
+      currentResultFile = generateSQL(url + " foo bar ");
+      beeLineArgs = new String[] { "-a", "delegationToken", "-f", scriptFileName };
+      System.out.println("Connection with token, user/password via connect, using input script");
+      BeeLine.main(beeLineArgs);
+      compareResults(currentResultFile);
+
+ // connect using token via Beeline using script
+ url = "jdbc:hive2://" + host + ":" + port + "/default";
+      currentResultFile = generateSQL(url + " foo bar ");
+      System.out.println("Connection with token, user/password via connect, using input script");
+      beeLineArgs = new String[] { "-f", scriptFileName, "-a", "delegationToken" };
+      BeeLine.main(beeLineArgs);
+      compareResults(currentResultFile);
+
+ /*
+ * Connect via kerberos with trusted proxy user
+ */
+ url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" + serverPrincipal
+ + ";hive.server2.proxy.user=" + proxyUser;
+ con = DriverManager.getConnection(url);
+ System.out.println("Connected successfully to " + url);
+ runTest();
+
+ ((HiveConnection)con).cancelDelegationToken(token);
+ con.close();
+ } catch (SQLException e) {
+ System.out.println("*** SQLException: " + e.getMessage() + " : " + e.getSQLState());
+ e.printStackTrace();
+ }
+
+ /* verify the connection fails after canceling the token */
+ try {
+ url = "jdbc:hive2://" + host + ":" + port + "/default;auth=delegationToken";
+ con = DriverManager.getConnection(url);
+      throw new Exception("connection should have failed after token cancellation");
+ } catch (SQLException e) {
+ // Expected to fail due to canceled token
+ }
+ }
+
+ private static void storeTokenInJobConf(String tokenStr) throws Exception {
+ ShimLoader.getHadoopShims().setTokenStr(ShimLoader.getHadoopShims().getUGIForConf(new Configuration()),
+ tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
+ System.out.println("Stored token " + tokenStr);
+ }
+
+ // run sql operations
+ private static void runTest() throws Exception {
+    // create table and check dir ownership
+ runDMLs();
+
+    // run dfs statements
+ for (String stmt: dfsStmts) {
+ runQuery(stmt);
+ }
+
+    // run select queries
+ for (String stmt: selectStmts) {
+ runQuery(stmt);
+ }
+
+ // delete all the objects created
+ cleanUp();
+ }
+
+ // create tables and load data
+ private static void runDMLs() throws Exception {
+ for (String stmt : dmlStmts) {
+ exStatement(stmt);
+ }
+ }
+
+ // drop tables
+ private static void cleanUp() throws Exception {
+ for (String stmt : cleanUpStmts) {
+ exStatement(stmt);
+ }
+ }
+
+ private static void runQuery(String sqlStmt) throws Exception {
+ Statement stmt = con.createStatement();
+ ResultSet res = stmt.executeQuery(sqlStmt);
+
+ ResultSetMetaData meta = res.getMetaData();
+ System.out.println("Resultset has " + meta.getColumnCount() + " columns");
+ for (int i = 1; i <= meta.getColumnCount(); i++) {
+ System.out.println("Column #" + i + " Name: " + meta.getColumnName(i) +
+ " Type: " + meta.getColumnType(i));
+ }
+
+ while (res.next()) {
+ for (int i = 1; i <= meta.getColumnCount(); i++) {
+ System.out.println("Column #" + i + ": " + res.getString(i));
+ }
+ }
+ res.close();
+ stmt.close();
+ }
+
+ // Execute the given sql statement
+ private static void exStatement(String query) throws Exception {
+ Statement stmt = con.createStatement();
+ stmt.execute(query);
+ if (!noClose) {
+ stmt.close();
+ }
+ }
+
+ // generate SQL stmts to execute
+ private static File generateSQL(String url) throws Exception {
+    String current = new java.io.File(".").getCanonicalPath();
+ String currentDir = System.getProperty("user.dir");
+ String queryTab = tabName + "_" + (tabCount++);
+ dmlStmts = new String[] {
+ "USE default",
+ "drop table if exists " + queryTab,
+ "create table " + queryTab + "(id int, name string) " +
+ "ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'",
+ "load data local inpath '" + tabDataFileName + "' into table " + queryTab
+ };
+ selectStmts = new String[] {
+ "select * from " + queryTab + " limit 5",
+ "select name, id from " + queryTab + " where id < 3",
+ };
+ dfsStmts = new String[] {
+// "set " + SESSION_USER_NAME,
+// "dfs -ls -d ${hiveconf:hive.metastore.warehouse.dir}/" + queryTab
+ };
+ cleanUpStmts = new String[] {
+ "drop table if exists " + queryTab
+ };
+
+ // write sql statements to file
+ return writeArrayToByteStream(url);
+ }
+
+ // generate data file for test
+ private static void generateData() throws Exception {
+    String[] fileData = {
+ "1|aaa",
+ "2|bbb",
+ "3|ccc",
+ "4|ddd",
+ "5|eee",
+ };
+
+ File tmpFile = File.createTempFile(tabName, ".data");
+ tmpFile.deleteOnExit();
+ tabDataFileName = tmpFile.getPath();
+ FileWriter fstream = new FileWriter(tabDataFileName);
+ BufferedWriter out = new BufferedWriter(fstream);
+ for (String line: fileData) {
+ out.write(line);
+ out.newLine();
+ }
+ out.close();
+ tmpFile.setWritable(true, true);
+ }
+
+  // Write the generated sql statements and beeline commands to an in-memory script stream
+  // Returns the result File object which will contain the query results
+ private static File writeArrayToByteStream(String url) throws Exception {
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+ if (url != null) {
+ writeCmdLine("!connect " + url, out);
+ }
+ writeCmdLine("!brief", out);
+ writeCmdLine("!set silent true", out);
+ resultFile = File.createTempFile(tabName, ".out");
+ if (!"true".equals(System.getProperty("proxyAuth.debug", "false"))) {
+ resultFile.deleteOnExit();
+ }
+ writeCmdLine("!record " + resultFile.getPath(), out);
+
+ for (String stmt: dmlStmts) {
+ writeSqlLine(stmt, out);
+ }
+
+ for (String stmt: selectStmts) {
+ writeSqlLine(stmt, out);
+ }
+
+ for (String stmt: cleanUpStmts) {
+ writeSqlLine(stmt, out);
+ }
+ writeCmdLine("!record", out);
+ writeCmdLine("!quit", out);
+
+ File tmpFile = File.createTempFile(tabName, ".q");
+ tmpFile.deleteOnExit();
+ scriptFileName = tmpFile.getPath();
+    FileOutputStream fstream = new FileOutputStream(scriptFileName);
+    out.writeTo(fstream);
+    fstream.close();
+
+ inpStream = new ByteArrayInputStream(out.toByteArray());
+ return resultFile;
+ }
+
+ // write stmt + ";" + System.getProperty("line.separator")
+ private static void writeSqlLine(String stmt, OutputStream out) throws Exception {
+ out.write(stmt.getBytes());
+ out.write(";".getBytes());
+ out.write(System.getProperty("line.separator").getBytes());
+ }
+
+ private static void writeCmdLine(String cmdLine, OutputStream out) throws Exception {
+ out.write(cmdLine.getBytes());
+ out.write(System.getProperty("line.separator").getBytes());
+ }
+
+  private static void compareResults(File actualResultFile) throws IOException {
+    // load the expected results
+    File baseResultFile = new File(System.getProperty("proxyAuth.res.file"), "data/files/ProxyAuth.res");
+    if (!FileUtils.contentEquals(baseResultFile, actualResultFile)) {
+      throw new IOException("File compare failed: " + actualResultFile.getPath() + " differs");
+ }
+ }
+}
+
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 388a604..e410cb1 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -833,6 +833,7 @@
// HiveServer2 auth configuration
HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE",
new StringsValidator("NOSASL", "NONE", "LDAP", "KERBEROS", "PAM", "CUSTOM")),
+ HIVE_SERVER2_ALLOW_USER_SUBSTITUTION("hive.server2.allow.user.substitution", true),
HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""),
HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""),
HIVE_SERVER2_PLAIN_LDAP_URL("hive.server2.authentication.ldap.url", null),
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index 3f01e0b..d3e7ba6 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -2406,4 +2406,12 @@
    By default tez will use the java opts from map tasks. This can be used to overwrite.
  </description>
</property>
+<property>
+  <name>hive.server2.allow.user.substitution</name>
+  <value>true</value>
+  <description>
+    Allow alternate user to be specified as part of HiveServer2 open connection request
+  </description>
+</property>
+
</configuration>
diff --git data/files/ProxyAuth.res data/files/ProxyAuth.res
new file mode 100644
index 0000000..96eca8f
--- /dev/null
+++ data/files/ProxyAuth.res
@@ -0,0 +1,15 @@
++-----+-------+
+| id | name |
++-----+-------+
+| 1 | aaa |
+| 2 | bbb |
+| 3 | ccc |
+| 4 | ddd |
+| 5 | eee |
++-----+-------+
++-------+-----+
+| name | id |
++-------+-----+
+| aaa | 1 |
+| bbb | 2 |
++-------+-----+
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 8210e75..c91df83 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -260,7 +260,7 @@ private void checkBadUrl(String url) throws SQLException {
try{
DriverManager.getConnection(url, "", "");
fail("should have thrown IllegalArgumentException but did not ");
- }catch(IllegalArgumentException i){
+ } catch(SQLException i) {
assertTrue(i.getMessage().contains("Bad URL format. Hostname not found "
+ " in authority part of the url"));
}
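
This test change tracks the driver-side fix in HiveConnection below: Utils.parseURL still throws IllegalArgumentException, but the constructor now wraps it in SQLException, which is what JDBC callers of DriverManager.getConnection should see. A sketch of the behavior the updated test asserts:

    try {
      DriverManager.getConnection("jdbc:hive2://;badurl", "", "");
    } catch (SQLException e) {
      // now surfaces here; message still contains the "Bad URL format ..." text
    }
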
diff --git jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
index d08e05b..59ce692 100644
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
@@ -121,7 +121,6 @@ public HiveConnection(String uri, Properties info) throws SQLException {
isClosed = false;
configureConnection();
}
-
public void abort(Executor executor) throws SQLException {
// JDK 1.7
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 4102d7a..6659ace 100644
--- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -19,6 +19,7 @@
package org.apache.hive.jdbc;
import java.io.FileInputStream;
+import java.io.IOException;
import java.security.KeyStore;
import java.sql.Array;
import java.sql.Blob;
@@ -54,16 +55,23 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCancelDelegationTokenReq;
+import org.apache.hive.service.cli.thrift.TCancelDelegationTokenResp;
import org.apache.hive.service.cli.thrift.TCloseSessionReq;
+import org.apache.hive.service.cli.thrift.TGetDelegationTokenReq;
+import org.apache.hive.service.cli.thrift.TGetDelegationTokenResp;
import org.apache.hive.service.cli.thrift.TOpenSessionReq;
import org.apache.hive.service.cli.thrift.TOpenSessionResp;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
+import org.apache.hive.service.cli.thrift.TRenewDelegationTokenReq;
+import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp;
import org.apache.hive.service.cli.thrift.TSessionHandle;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
@@ -85,6 +93,7 @@
private static final String HIVE_AUTH_TYPE= "auth";
private static final String HIVE_AUTH_QOP = "sasl.qop";
private static final String HIVE_AUTH_SIMPLE = "noSasl";
+ private static final String HIVE_AUTH_TOKEN = "delegationToken";
private static final String HIVE_AUTH_USER = "user";
private static final String HIVE_AUTH_PRINCIPAL = "principal";
private static final String HIVE_AUTH_PASSWD = "password";
@@ -119,7 +128,12 @@ public HiveConnection(String uri, Properties info) throws SQLException {
setupLoginTimeout();
jdbcURI = uri;
// parse the connection uri
- Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
+ Utils.JdbcConnectionParams connParams;
+ try {
+ connParams = Utils.parseURL(uri);
+ } catch (IllegalArgumentException e) {
+ throw new SQLException(e);
+ }
// extract parsed connection parameters:
    // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
    // each list: <key1>=<val1>;<key2>=<val2> and so on
@@ -148,13 +162,17 @@ public HiveConnection(String uri, Properties info) throws SQLException {
if (isEmbeddedMode) {
client = new EmbeddedThriftBinaryCLIService();
} else {
- // extract user/password from JDBC connection properties if its not supplied in the connection URL
+      // extract user/password from JDBC connection properties if it's not supplied in the
+      // connection URL
if (info.containsKey(HIVE_AUTH_USER)) {
sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
if (info.containsKey(HIVE_AUTH_PASSWD)) {
sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
}
}
+ if (info.containsKey(HIVE_AUTH_TYPE)) {
+ sessConfMap.put(HIVE_AUTH_TYPE, info.getProperty(HIVE_AUTH_TYPE));
+ }
// open the client transport
openTransport();
// set up the client
@@ -170,7 +188,7 @@ public HiveConnection(String uri, Properties info) throws SQLException {
supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
// open client session
- openSession();
+ openSession(connParams.getSessionVars());
configureConnection(connParams.getDbName());
}
@@ -259,14 +277,26 @@ private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
}
}
+ /**
+ * Create transport per the connection options
+ * Supported transport options are:
+ * - SASL based transports over
+ * + Kerberos
+ * + Delegation token
+ * + SSL
+ * + non-SSL
+ * - Raw (non-SASL) socket
+ *
+   * Kerberos and Delegation token transports support SASL QOP configurations
+ */
private TTransport createBinaryTransport() throws SQLException {
try {
// handle secure connection if specified
if (!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))) {
// If Kerberos
+      Map<String, String> saslProps = new HashMap<String, String>();
+ SaslQOP saslQOP = SaslQOP.AUTH;
if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
-        Map<String, String> saslProps = new HashMap<String, String>();
- SaslQOP saslQOP = SaslQOP.AUTH;
if (sessConfMap.containsKey(HIVE_AUTH_QOP)) {
try {
saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
@@ -281,30 +311,43 @@ private TTransport createBinaryTransport() throws SQLException {
sessConfMap.get(HIVE_AUTH_PRINCIPAL), host,
HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps);
} else {
- String userName = sessConfMap.get(HIVE_AUTH_USER);
- if ((userName == null) || userName.isEmpty()) {
- userName = HIVE_ANONYMOUS_USER;
- }
- String passwd = sessConfMap.get(HIVE_AUTH_PASSWD);
- if ((passwd == null) || passwd.isEmpty()) {
- passwd = HIVE_ANONYMOUS_PASSWD;
- }
- String useSslStr = sessConfMap.get(HIVE_USE_SSL);
- if ("true".equalsIgnoreCase(useSslStr)) {
- String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE);
- String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD);
- if (sslTrustStore == null || sslTrustStore.isEmpty()) {
- transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
- } else {
- transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout,
+ // If there's a delegation token available then use token based connection
+ String tokenStr = getClientDelegationToken(sessConfMap);
+ if (tokenStr != null) {
+ transport = KerberosSaslHelper.getTokenTransport(tokenStr,
+ host, HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps);
+ } else {
+ // we are using PLAIN Sasl connection with user/password
+ String userName = sessConfMap.get(HIVE_AUTH_USER);
+ if ((userName == null) || userName.isEmpty()) {
+ userName = HIVE_ANONYMOUS_USER;
+ }
+ String passwd = sessConfMap.get(HIVE_AUTH_PASSWD);
+ if ((passwd == null) || passwd.isEmpty()) {
+ passwd = HIVE_ANONYMOUS_PASSWD;
+ }
+ String useSslStr = sessConfMap.get(HIVE_USE_SSL);
+ if ("true".equalsIgnoreCase(useSslStr)) {
+ // get SSL socket
+ String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE);
+ String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD);
+ if (sslTrustStore == null || sslTrustStore.isEmpty()) {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
+ } else {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout,
sslTrustStore, sslTrustStorePassword);
+ }
+ transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
+ } else {
+ // get non-SSL socket transport
+ transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
}
- } else {
- transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
- }
+ // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
+ }
}
} else {
+ // Raw socket connection (non-sasl)
transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
}
} catch (SaslException e) {
@@ -327,11 +370,32 @@ private boolean isHttpTransportMode() {
return false;
}
- private void openSession() throws SQLException {
+  // Look up the delegation token. First in the connection URL, then Configuration
+  private String getClientDelegationToken(Map<String, String> jdbcConnConf)
+ throws SQLException {
+ String tokenStr = null;
+ if (HIVE_AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(HIVE_AUTH_TYPE))) {
+ // check delegation token in job conf if any
+ try {
+ tokenStr = ShimLoader.getHadoopShims().
+ getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
+ } catch (IOException e) {
+ throw new SQLException("Error reading token ", e);
+ }
+ }
+ return tokenStr;
+ }
+
+  private void openSession(Map<String, String> sessVars) throws SQLException {
TOpenSessionReq openReq = new TOpenSessionReq();
// set the session configuration
- // openReq.setConfiguration(null);
+ if (sessVars.containsKey(HiveAuthFactory.HS2_PROXY_USER)) {
+      Map<String, String> openConf = new HashMap<String, String>();
+ openConf.put(HiveAuthFactory.HS2_PROXY_USER,
+ sessVars.get(HiveAuthFactory.HS2_PROXY_USER));
+ openReq.setConfiguration(openConf);
+ }
try {
TOpenSessionResp openResp = client.OpenSession(openReq);
@@ -420,6 +484,44 @@ public void abort(Executor executor) throws SQLException {
throw new SQLException("Method not supported");
}
+ public String getDelegationToken(String owner, String renewer) throws SQLException {
+ TGetDelegationTokenReq req = new TGetDelegationTokenReq(sessHandle, owner, renewer);
+ try {
+ TGetDelegationTokenResp tokenResp = client.GetDelegationToken(req);
+ Utils.verifySuccess(tokenResp.getStatus());
+ return tokenResp.getDelegationToken();
+ } catch (TException e) {
+      throw new SQLException("Could not retrieve token: " +
+          e.getMessage(), "08S01", e);
+ }
+ }
+
+ public void cancelDelegationToken(String tokenStr) throws SQLException {
+ TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq(sessHandle, tokenStr);
+ try {
+ TCancelDelegationTokenResp cancelResp =
+ client.CancelDelegationToken(cancelReq);
+ Utils.verifySuccess(cancelResp.getStatus());
+ return;
+ } catch (TException e) {
+      throw new SQLException("Could not cancel token: " +
+          e.getMessage(), "08S01", e);
+ }
+ }
+
+ public void renewDelegationToken(String tokenStr) throws SQLException {
+    TRenewDelegationTokenReq renewReq = new TRenewDelegationTokenReq(sessHandle, tokenStr);
+    try {
+      TRenewDelegationTokenResp renewResp =
+          client.RenewDelegationToken(renewReq);
+ Utils.verifySuccess(renewResp.getStatus());
+ return;
+ } catch (TException e) {
+      throw new SQLException("Could not renew token: " +
+          e.getMessage(), "08S01", e);
+ }
+ }
+
/*
* (non-Javadoc)
*
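
The three public methods added above give JDBC clients a delegation token lifecycle on top of a Kerberos session. A condensed sketch of the round trip, mirroring the steps ProxyAuthTest takes earlier in this patch (host, port, and principal are placeholders):

    // 1. Authenticate with Kerberos; ask HS2 for a token on behalf of the proxy user.
    Connection kerbConn = DriverManager.getConnection(
        "jdbc:hive2://host:10000/default;principal=hive/host@EXAMPLE.COM");
    String token = ((HiveConnection) kerbConn).getDelegationToken("proxyUser",
        "hive/host@EXAMPLE.COM");

    // 2. Make the token visible to the shims (see ProxyAuthTest.storeTokenInJobConf),
    //    then reconnect without Kerberos credentials, selecting token auth in the URL.
    Connection tokenConn = DriverManager.getConnection(
        "jdbc:hive2://host:10000/default;auth=delegationToken");

    // 3. Renew periodically, and cancel when done, over a kerberized connection:
    //    ((HiveConnection) kerbConn).renewDelegationToken(token);
    //    ((HiveConnection) kerbConn).cancelDelegationToken(token);
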
diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java
index 608837e..87fec11 100644
--- jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -145,7 +145,7 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx
JdbcConnectionParams connParams = new JdbcConnectionParams();
if (!uri.startsWith(URL_PREFIX)) {
- throw new IllegalArgumentException("Bad URL format");
+ throw new IllegalArgumentException("Bad URL format: Missing prefix " + URL_PREFIX);
}
// For URLs with no other configuration
@@ -197,7 +197,9 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx
if (sessVars != null) {
Matcher sessMatcher = pattern.matcher(sessVars);
while (sessMatcher.find()) {
- connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2));
+ if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
+ throw new IllegalArgumentException("Bad URL format: Multiple values for property " + sessMatcher.group(1));
+ }
}
}
}
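
The parseURL change above also turns a repeated session variable into a hard error instead of silently keeping the last value, which matters now that hive.server2.proxy.user is security sensitive. A sketch:

    try {
      Utils.parseURL("jdbc:hive2://host:10000/default;"
          + "hive.server2.proxy.user=alice;hive.server2.proxy.user=bob");
    } catch (IllegalArgumentException e) {
      // "Bad URL format: Multiple values for property hive.server2.proxy.user"
    }
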
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index d8ba3aa..6759903 100644
--- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -32,7 +32,9 @@
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TServerSocket;
@@ -67,7 +69,10 @@ public String getAuthName() {
private HadoopThriftAuthBridge.Server saslServer = null;
private String authTypeStr;
- HiveConf conf;
+ private final HiveConf conf;
+
+ public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
+ public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
public HiveAuthFactory() throws TTransportException {
conf = new HiveConf();
@@ -82,6 +87,13 @@ public HiveAuthFactory() throws TTransportException {
conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)
);
+ // start delegation token manager
+ try {
+ saslServer.startDelegationTokenSecretManager(conf, null);
+ } catch (IOException e) {
+ throw new TTransportException("Failed to start token manager", e);
+ }
+
}
}
@@ -145,6 +157,10 @@ public String getRemoteUser() {
}
}
+ public String getIpAddress() {
+ return saslServer != null ? saslServer.getRemoteAddress().toString() : null;
+ }
+
/* perform kerberos login using the hadoop shim API if the configuration is available */
public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
@@ -204,4 +220,81 @@ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
return TSSLTransportFactory.getServerSocket(portNum, 10000, serverAddress, params);
}
+ // retrieve delegation token for the given user
+ public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication");
+ }
+
+ try {
+ String tokenStr = saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN);
+      if (tokenStr == null || tokenStr.isEmpty()) {
+        throw new HiveSQLException(
+            "Received empty token when retrieving delegation token for user " + owner);
+ }
+ return tokenStr;
+ } catch (IOException e) {
+ throw new HiveSQLException("Error retrieving delegation token for user " + owner, e);
+ } catch (InterruptedException e) {
+      throw new HiveSQLException("Delegation token retrieval interrupted", e);
+ }
+ }
+
+ // cancel given delegation token
+ public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication");
+ }
+ try {
+ saslServer.cancelDelegationToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException("Error canceling delegation token " + delegationToken, e);
+ }
+ }
+
+ public void renewDelegationToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication");
+ }
+ try {
+ saslServer.renewDelegationToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException("Error renewing delegation token " + delegationToken, e);
+ }
+ }
+
+ public String getUserFromToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication");
+ }
+ try {
+ return saslServer.getUserFromToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException("Error extracting user from delegation token " + delegationToken, e);
+ }
+ }
+
+ public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
+ HiveConf hiveConf) throws HiveSQLException {
+ UserGroupInformation sessionUgi;
+
+ try {
+ if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
+ } else {
+ sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(realUser, null);
+ }
+ if (!proxyUser.equalsIgnoreCase(realUser)) {
+ ShimLoader.getHadoopShims().
+ authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
+ }
+ } catch (IOException e) {
+      throw new HiveSQLException("Failed to validate proxy privilege of " + realUser +
+          " for " + proxyUser, e);
+ }
+ }
+
}
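
verifyProxyAccess above delegates the substitution decision to Hadoop's proxy-user authorization via the shims, so the cluster's hadoop.proxyuser.* rules decide who may impersonate whom; the hive.server2.allow.user.substitution flag added in HiveConf is the HS2-side switch for honoring such requests at all. A sketch of the call the session layer makes (user names and address are placeholders):

    // Throws HiveSQLException unless "hive" may act for "alice" from this address,
    // per hadoop.proxyuser.hive.hosts / hadoop.proxyuser.hive.groups in core-site.xml.
    HiveAuthFactory.verifyProxyAccess("hive", "alice", clientIpAddress, hiveConf);
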
diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 519556c..93ec545 100644
--- service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -74,5 +74,17 @@ public static TTransport getKerberosTransport(String principal, String host,
}
}
+ public static TTransport getTokenTransport(String tokenStr, String host,
+      final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
+ HadoopThriftAuthBridge.Client authBridge =
+ ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+
+ try {
+ return authBridge.createClientTransport(null, host,
+ "DIGEST", tokenStr, underlyingTransport, saslProps);
+ } catch (IOException e) {
+ throw new SaslException("Failed to open client transport", e);
+ }
+ }
}
diff --git service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
index 15b1675..5a4519f 100644
--- service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
+++ service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
@@ -97,20 +97,16 @@ public void handle(Callback[] callbacks)
private static class SQLPlainProcessorFactory extends TProcessorFactory {
private final ThriftCLIService service;
private final HiveConf conf;
- private final boolean doAsEnabled;
public SQLPlainProcessorFactory(ThriftCLIService service) {
super(null);
this.service = service;
this.conf = service.getHiveConf();
- this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
}
@Override
public TProcessor getProcessor(TTransport trans) {
- TProcessor baseProcessor = new TCLIService.Processor(service);
- return doAsEnabled ? new TUGIContainingProcessor(baseProcessor, conf) :
- new TSetIpAddressProcessor(service);
+ return new TSetIpAddressProcessor(service);
}
}
diff --git service/src/java/org/apache/hive/service/cli/CLIService.java service/src/java/org/apache/hive/service/cli/CLIService.java
index 2b1e712..bdc943e 100644
--- service/src/java/org/apache/hive/service/cli/CLIService.java
+++ service/src/java/org/apache/hive/service/cli/CLIService.java
@@ -427,4 +427,28 @@ private void setupStagingDir(String dirPath, boolean isLocal) throws IOException
fs.setPermission(scratchDir, fsPermission);
}
}
+
+ @Override
+ public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String owner, String renewer) throws HiveSQLException {
+ String delegationToken = sessionManager.getSession(sessionHandle).
+ getDelegationToken(authFactory, owner, renewer);
+ LOG.info(sessionHandle + ": getDelegationToken()");
+ return delegationToken;
+ }
+
+ @Override
+ public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ sessionManager.getSession(sessionHandle).
+ cancelDelegationToken(authFactory, tokenStr);
+ LOG.info(sessionHandle + ": cancelDelegationToken()");
+ }
+
+ @Override
+ public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, tokenStr);
+ LOG.info(sessionHandle + ": renewDelegationToken()");
+ }
}
diff --git service/src/java/org/apache/hive/service/cli/CLIServiceClient.java service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
index b9d1489..87c10b9 100644
--- service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
+++ service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
@@ -20,6 +20,8 @@
import java.util.Collections;
+import org.apache.hive.service.auth.HiveAuthFactory;
+
/**
* CLIServiceClient.
@@ -41,4 +43,16 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000);
}
+ @Override
+ public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String owner, String renewer) throws HiveSQLException;
+
+ @Override
+ public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException;
+
+ @Override
+ public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException;
+
}
diff --git service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
index a31ea94..f665146 100644
--- service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
+++ service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
@@ -21,6 +21,8 @@
import java.util.List;
import java.util.Map;
+import org.apache.hive.service.auth.HiveAuthFactory;
+
/**
* EmbeddedCLIServiceClient.
@@ -188,4 +190,22 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio
return cliService.fetchResults(opHandle, orientation, maxRows);
}
+
+ @Override
+ public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String owner, String renewer) throws HiveSQLException {
+ return cliService.getDelegationToken(sessionHandle, authFactory, owner, renewer);
+ }
+
+ @Override
+ public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ cliService.cancelDelegationToken(sessionHandle, authFactory, tokenStr);
+ }
+
+ @Override
+ public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ cliService.renewDelegationToken(sessionHandle, authFactory, tokenStr);
+ }
}
diff --git service/src/java/org/apache/hive/service/cli/ICLIService.java service/src/java/org/apache/hive/service/cli/ICLIService.java
index 621d689..c569796 100644
--- service/src/java/org/apache/hive/service/cli/ICLIService.java
+++ service/src/java/org/apache/hive/service/cli/ICLIService.java
@@ -23,6 +23,8 @@
+import org.apache.hive.service.auth.HiveAuthFactory;
+
public interface ICLIService {
public abstract SessionHandle openSession(String username, String password,
@@ -91,4 +93,14 @@ public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation o
public abstract RowSet fetchResults(OperationHandle opHandle)
throws HiveSQLException;
+ public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String owner, String renewer) throws HiveSQLException;
+
+ public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException;
+
+ public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException;
+
}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSession.java service/src/java/org/apache/hive/service/cli/session/HiveSession.java
index c8fb8ec..b1bad47 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSession.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSession.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
import org.apache.hive.service.cli.GetInfoValue;
@@ -183,4 +184,17 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio
public String getUserName();
public void setUserName(String userName);
+
+ public String getIpAddress();
+
+ public void setIpAddress(String ipAddress);
+
+ public String getDelegationToken(HiveAuthFactory authFactory, String owner,
+ String renewer) throws HiveSQLException;
+
+ public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException;
+
+ public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException;
}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index d6d0d27..d406358 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -35,7 +35,9 @@
import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
import org.apache.hadoop.hive.ql.history.HiveHistory;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.common.util.HiveVersionInfo;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
import org.apache.hive.service.cli.GetInfoValue;
@@ -65,8 +67,9 @@
private String username;
private final String password;
- private final HiveConf hiveConf;
+ private final HiveConf hiveConf = new HiveConf();
private final SessionState sessionState;
+ private String ipAddress;
private static final String FETCH_WORK_SERDE_CLASS =
"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
@@ -79,11 +82,11 @@
  private final Set<OperationHandle> opHandleSet = new HashSet<OperationHandle>();
public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
-      HiveConf serverhiveConf, Map<String, String> sessionConfMap) {
+      Map<String, String> sessionConfMap, String ipAddress) {
this.username = username;
this.password = password;
this.sessionHandle = new SessionHandle(protocol);
- this.hiveConf = new HiveConf(serverhiveConf);
+ this.ipAddress = ipAddress;
//set conf properties specified by user from client side
if (sessionConfMap != null) {
@@ -418,6 +421,7 @@ public SessionState getSessionState() {
public String getUserName() {
return username;
}
+
@Override
public void setUserName(String userName) {
this.username = userName;
@@ -479,4 +483,42 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
protected HiveSession getSession() {
return this;
}
+
+ @Override
+ public String getIpAddress() {
+ return ipAddress;
+ }
+
+ @Override
+ public void setIpAddress(String ipAddress) {
+ this.ipAddress = ipAddress;
+ }
+
+ @Override
+ public String getDelegationToken(HiveAuthFactory authFactory, String owner, String renewer)
+ throws HiveSQLException {
+ HiveAuthFactory.verifyProxyAccess(getUsername(), owner, getIpAddress(), getHiveConf());
+ return authFactory.getDelegationToken(owner, renewer);
+ }
+
+ @Override
+ public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException {
+ HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
+ getIpAddress(), getHiveConf());
+ authFactory.cancelDelegationToken(tokenStr);
+ }
+
+ @Override
+ public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException {
+ HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
+ getIpAddress(), getHiveConf());
+ authFactory.renewDelegationToken(tokenStr);
+ }
+
+ // extract the real user from the given token string
+ private String getUserFromToken(HiveAuthFactory authFactory, String tokenStr) throws HiveSQLException {
+ return authFactory.getUserFromToken(tokenStr);
+ }
}
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
index b934ebe..1370bc4 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
@@ -43,8 +44,9 @@
private HiveSession proxySession = null;
public HiveSessionImplwithUGI(TProtocolVersion protocol, String username, String password,
-      HiveConf hiveConf, Map<String, String> sessionConf, String delegationToken) throws HiveSQLException {
-    super(protocol, username, password, hiveConf, sessionConf);
+      Map<String, String> sessionConf, String ipAddress,
+ String delegationToken) throws HiveSQLException {
+ super(protocol, username, password, sessionConf, ipAddress);
setSessionUGI(username);
setDelegationToken(delegationToken);
}
@@ -148,5 +150,22 @@ public void setProxySession(HiveSession proxySession) {
this.proxySession = proxySession;
}
+ @Override
+ public String getDelegationToken(HiveAuthFactory authFactory, String owner,
+ String renewer) throws HiveSQLException {
+ return authFactory.getDelegationToken(owner, renewer);
+ }
+
+ @Override
+ public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException {
+ authFactory.cancelDelegationToken(tokenStr);
+ }
+
+ @Override
+ public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+ throws HiveSQLException {
+ authFactory.renewDelegationToken(tokenStr);
+ }
}
diff --git service/src/java/org/apache/hive/service/cli/session/SessionManager.java service/src/java/org/apache/hive/service/cli/session/SessionManager.java
index cec3b04..fafaf95 100644
--- service/src/java/org/apache/hive/service/cli/session/SessionManager.java
+++ service/src/java/org/apache/hive/service/cli/session/SessionManager.java
@@ -18,6 +18,7 @@
package org.apache.hive.service.cli.session;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -31,6 +32,9 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.hooks.HookUtils;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.CompositeService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.SessionHandle;
@@ -101,17 +105,14 @@ public SessionHandle openSession(TProtocolVersion protocol, String username, Str
public SessionHandle openSession(TProtocolVersion protocol, String username, String password,
      Map<String, String> sessionConf, boolean withImpersonation, String delegationToken)
throws HiveSQLException {
- if (username == null) {
- username = threadLocalUserName.get();
- }
HiveSession session;
if (withImpersonation) {
HiveSessionImplwithUGI hiveSessionUgi = new HiveSessionImplwithUGI(protocol, username, password,
- hiveConf, sessionConf, delegationToken);
+ sessionConf, threadLocalIpAddress.get(), delegationToken);
session = HiveSessionProxy.getProxy(hiveSessionUgi, hiveSessionUgi.getSessionUgi());
hiveSessionUgi.setProxySession(session);
} else {
- session = new HiveSessionImpl(protocol, username, password, hiveConf, sessionConf);
+ session = new HiveSessionImpl(protocol, username, password, sessionConf, threadLocalIpAddress.get());
}
session.setSessionManager(this);
session.setOperationManager(operationManager);
@@ -161,6 +162,10 @@ private void clearIpAddress() {
threadLocalIpAddress.remove();
}
+ public static String getIpAddress() {
+ return threadLocalIpAddress.get();
+ }
+
  private static ThreadLocal<String> threadLocalUserName = new ThreadLocal<String>() {
@Override
protected synchronized String initialValue() {
@@ -176,6 +181,10 @@ private void clearUserName() {
threadLocalUserName.remove();
}
+ public static String getUserName() {
+ return threadLocalUserName.get();
+ }
+
// execute session hooks
private void executeSessionHooks(HiveSession session) throws Exception {
    List<HiveSessionHook> sessionHooks = HookUtils.getHooks(hiveConf,
@@ -190,3 +199,4 @@ private void executeSessionHooks(HiveSession session) throws Exception {
}
}
+
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 26bda5a..0c9ac37 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -29,6 +29,8 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.AbstractService;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
@@ -41,6 +43,7 @@
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.session.SessionManager;
import org.apache.thrift.TException;
import org.apache.thrift.server.TServer;
@@ -111,6 +114,75 @@ public synchronized void stop() {
super.stop();
}
+ @Override
+ public TGetDelegationTokenResp GetDelegationToken(TGetDelegationTokenReq req)
+ throws TException {
+ TGetDelegationTokenResp resp = new TGetDelegationTokenResp();
+
+ if (hiveAuthFactory == null) {
+ resp.setStatus(unsecureTokenErrorStatus());
+ } else {
+ try {
+ String token = cliService.getDelegationToken(
+ new SessionHandle(req.getSessionHandle()),
+ hiveAuthFactory, req.getOwner(), req.getRenewer());
+ resp.setDelegationToken(token);
+ resp.setStatus(OK_STATUS);
+ } catch (HiveSQLException e) {
+ LOG.error("Error obtaining delegation token", e);
+ TStatus tokenErrorStatus = HiveSQLException.toTStatus(e);
+ tokenErrorStatus.setSqlState("42000");
+ resp.setStatus(tokenErrorStatus);
+ }
+ }
+ return resp;
+ }
+
+ @Override
+ public TCancelDelegationTokenResp CancelDelegationToken(TCancelDelegationTokenReq req)
+ throws TException {
+ TCancelDelegationTokenResp resp = new TCancelDelegationTokenResp();
+
+ if (hiveAuthFactory == null) {
+ resp.setStatus(unsecureTokenErrorStatus());
+ } else {
+ try {
+ cliService.cancelDelegationToken(new SessionHandle(req.getSessionHandle()),
+ hiveAuthFactory, req.getDelegationToken());
+ resp.setStatus(OK_STATUS);
+ } catch (HiveSQLException e) {
+ LOG.error("Error canceling delegation token", e);
+ resp.setStatus(HiveSQLException.toTStatus(e));
+ }
+ }
+ return resp;
+ }
+
+ @Override
+ public TRenewDelegationTokenResp RenewDelegationToken(TRenewDelegationTokenReq req)
+ throws TException {
+ TRenewDelegationTokenResp resp = new TRenewDelegationTokenResp();
+ if (hiveAuthFactory == null) {
+ resp.setStatus(unsecureTokenErrorStatus());
+ } else {
+ try {
+ cliService.renewDelegationToken(new SessionHandle(req.getSessionHandle()),
+ hiveAuthFactory, req.getDelegationToken());
+ resp.setStatus(OK_STATUS);
+ } catch (HiveSQLException e) {
+ LOG.error("Error obtaining renewing token", e);
+ resp.setStatus(HiveSQLException.toTStatus(e));
+ }
+ }
+ return resp;
+ }
+
+ private TStatus unsecureTokenErrorStatus() {
+ TStatus errorStatus = new TStatus(TStatusCode.ERROR_STATUS);
+ errorStatus.setErrorMessage("Delegation tokens are only supported over a " +
+ "remote client with Kerberos authentication");
+ return errorStatus;
+ }
@Override
public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException {
@@ -129,13 +201,25 @@ public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException {
return resp;
}
- private String getUserName(TOpenSessionReq req) {
+ private String getIpAddress() {
+ if (hiveAuthFactory != null) {
+ return hiveAuthFactory.getIpAddress();
+ }
+ return SessionManager.getIpAddress();
+ }
+
+ private String getUserName(TOpenSessionReq req) throws HiveSQLException {
+ String userName;
if (hiveAuthFactory != null
&& hiveAuthFactory.getRemoteUser() != null) {
- return hiveAuthFactory.getRemoteUser();
+ userName = hiveAuthFactory.getRemoteUser();
} else {
- return req.getUsername();
+ userName = SessionManager.getUserName();
}
+ if (userName == null) {
+ userName = req.getUsername();
+ }
+ return getProxyUser(userName, req.getConfiguration(), getIpAddress());
}
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
@@ -145,9 +229,8 @@ SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
TProtocolVersion protocol = getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());
SessionHandle sessionHandle;
- if (cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
- .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString()) &&
- cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
+ if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) &&
+ (userName != null)) {
String delegationTokenStr = null;
try {
delegationTokenStr = cliService.getDelegationTokenFromMetaStore(userName);
@@ -420,4 +503,36 @@ public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException {
@Override
public abstract void run();
+
+ /**
+ * If a proxy user name is provided in the session conf, check privileges to substitute the user.
+ * @param realUser the authenticated user opening the session
+ * @param sessionConf session configuration map sent by the client
+ * @param ipAddress client IP address, used for proxy authorization
+ * @return the effective session user: the proxy user if substitution is permitted, otherwise realUser
+ * @throws HiveSQLException if user substitution is disabled or authorization fails
+ */
+ private String getProxyUser(String realUser, Map<String, String> sessionConf,
+ String ipAddress) throws HiveSQLException {
+ if (sessionConf == null || !sessionConf.containsKey(HiveAuthFactory.HS2_PROXY_USER)) {
+ return realUser;
+ }
+
+ // Extract the proxy user name and check if we are allowed to do the substitution
+ String proxyUser = sessionConf.get(HiveAuthFactory.HS2_PROXY_USER);
+ if (!hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ALLOW_USER_SUBSTITUTION)) {
+ throw new HiveSQLException("Proxy user substitution is not allowed");
+ }
+
+ // If there's no authentication, then directly substitute the user
+ if (HiveAuthFactory.AuthTypes.NONE.toString().
+ equalsIgnoreCase(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) {
+ return proxyUser;
+ }
+
+ // Verify proxy user privilege of the realUser for the proxyUser
+ HiveAuthFactory.verifyProxyAccess(realUser, proxyUser, ipAddress, hiveConf);
+ return proxyUser;
+ }
}
+
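The getProxyUser() logic above is driven entirely by the client's session
configuration. A hedged usage sketch of the client side, mirroring the
TestSessionHooks.testProxyUser case further down; client is assumed to be a
ThriftCLIServiceClient already connected to a HiveServer2 instance:

    // Ask HiveServer2 to run the session as "alice" while authenticating as the
    // connecting user. HiveAuthFactory.HS2_PROXY_USER is the session-conf key
    // inspected by getProxyUser(); the server additionally requires
    // hive.server2.allow.user.substitution=true and, for any auth mode other
    // than NONE, Hadoop proxy-user privileges for the real user.
    Map<String, String> sessionConf = new HashMap<String, String>();
    sessionConf.put(HiveAuthFactory.HS2_PROXY_USER, "alice");
    SessionHandle handle = client.openSession("realUser", "password", sessionConf);
    // ... operations now execute as alice ...
    client.closeSession(handle);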
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
index 3675e86..e3384d3 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
@@ -21,6 +21,7 @@
import java.util.List;
import java.util.Map;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIServiceClient;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
@@ -33,6 +34,7 @@
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.TableSchema;
+import org.apache.thrift.TException;
/**
* ThriftCLIServiceClient.
@@ -404,4 +406,48 @@ public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
// TODO: set the correct default fetch size
return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 10000);
}
+
+ @Override
+ public String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String owner, String renewer) throws HiveSQLException {
+ TGetDelegationTokenReq req = new TGetDelegationTokenReq(
+ sessionHandle.toTSessionHandle(), owner, renewer);
+ try {
+ TGetDelegationTokenResp tokenResp = cliService.GetDelegationToken(req);
+ checkStatus(tokenResp.getStatus());
+ return tokenResp.getDelegationToken();
+ } catch (Exception e) {
+ throw new HiveSQLException(e);
+ }
+ }
+
+ @Override
+ public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq(
+ sessionHandle.toTSessionHandle(), tokenStr);
+ try {
+ TCancelDelegationTokenResp cancelResp =
+ cliService.CancelDelegationToken(cancelReq);
+ checkStatus(cancelResp.getStatus());
+ } catch (TException e) {
+ throw new HiveSQLException(e);
+ }
+ }
+
+ @Override
+ public void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+ String tokenStr) throws HiveSQLException {
+ TRenewDelegationTokenReq renewReq = new TRenewDelegationTokenReq(
+ sessionHandle.toTSessionHandle(), tokenStr);
+ try {
+ TRenewDelegationTokenResp renewResp =
+ cliService.RenewDelegationToken(renewReq);
+ checkStatus(renewResp.getStatus());
+ } catch (Exception e) {
+ throw new HiveSQLException(e);
+ }
+ }
}
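Each of the three new client methods builds the Thrift request, invokes the
service, and lets checkStatus() convert a non-OK TStatus into a
HiveSQLException. A hedged end-to-end sketch; sessionHandle and authFactory
are assumed to come from an open connection to a kerberized server (the server
replies with unsecureTokenErrorStatus() otherwise):

    // Obtain a token owned by "owner" and renewable by "renewer",
    // then exercise the renew and cancel paths.
    String token = client.getDelegationToken(sessionHandle, authFactory, "owner", "renewer");
    client.renewDelegationToken(sessionHandle, authFactory, token);
    client.cancelDelegationToken(sessionHandle, authFactory, token);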
diff --git service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
index 8fa4afd..fb784aa 100644
--- service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
+++ service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
@@ -45,6 +45,6 @@ public void testDoAsSetting(){
tcliService.init(hconf);
TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(tcliService);
assertEquals("doAs enabled processor for unsecure mode",
- procFactory.getProcessor(null).getClass(), TUGIContainingProcessor.class);
+ procFactory.getProcessor(null).getClass(), TSetIpAddressProcessor.class);
}
}
diff --git service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
index 2fac800..49bc54b 100644
--- service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
+++ service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
@@ -19,12 +19,15 @@
package org.apache.hive.service.cli.session;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
@@ -34,7 +37,7 @@
public class TestSessionHooks extends TestCase {
- public static final String SESSION_USER_NAME = "user1";
+ private static String sessionUserName = "user1";
private EmbeddedThriftBinaryCLIService service;
private ThriftCLIServiceClient client;
@@ -44,7 +47,7 @@
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
- Assert.assertEquals(sessionHookContext.getSessionUser(), SESSION_USER_NAME);
+ Assert.assertEquals(sessionUserName, sessionHookContext.getSessionUser());
String sessionHook = sessionHookContext.getSessionConf().
getVar(ConfVars.HIVE_SERVER2_SESSION_HOOK);
Assert.assertTrue(sessionHook.contains(this.getClass().getName()));
@@ -56,6 +59,7 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLExcepti
@Before
public void setUp() throws Exception {
super.setUp();
+ SessionHookTest.runCount.set(0);
System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
TestSessionHooks.SessionHookTest.class.getName());
service = new EmbeddedThriftBinaryCLIService();
@@ -65,9 +69,25 @@ public void setUp() throws Exception {
@Test
public void testSessionHook () throws Exception {
// create session, test if the hook got fired by checking the expected property
- SessionHandle sessionHandle = client.openSession(SESSION_USER_NAME, "foobar",
+ SessionHandle sessionHandle = client.openSession(sessionUserName, "foobar",
Collections.emptyMap());
Assert.assertEquals(1, SessionHookTest.runCount.get());
client.closeSession(sessionHandle);
}
+
+ /**
+ * Create a session with the proxy user property set, then verify the effective session user.
+ * @throws Exception
+ */
+ @Test
+ public void testProxyUser() throws Exception {
+ String connectingUser = "user1";
+ String proxyUser = System.getProperty("user.name");
+ Map<String, String> sessConf = new HashMap<String, String>();
+ sessConf.put(HiveAuthFactory.HS2_PROXY_USER, proxyUser);
+ sessionUserName = proxyUser;
+ SessionHandle sessionHandle = client.openSession(connectingUser, "foobar", sessConf);
+ Assert.assertEquals(1, SessionHookTest.runCount.get());
+ client.closeSession(sessionHandle);
+ }
}
diff --git shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 51c8051..b250963 100644
--- shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -556,6 +556,12 @@ public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenS
}
@Override
+ public String addServiceToToken(String tokenStr, String tokenService) throws IOException {
+ throw new UnsupportedOperationException("Tokens are not supported in the current Hadoop version");
+ }
+
+ @Override
public T doAs(UserGroupInformation ugi, PrivilegedExceptionAction pvea) throws
IOException, InterruptedException {
try {
@@ -637,6 +643,11 @@ public void remove() {
}
@Override
+ public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi,
+ String ipAddress, Configuration conf) throws IOException {
+ // This hadoop version doesn't have proxy verification
+ }
+
+ @Override
public boolean isSecurityEnabled() {
return false;
}
diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
index e205caa..d4cddda 100644
--- shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
+++ shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
@@ -64,6 +64,7 @@
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
@@ -459,12 +460,39 @@ public String getTokenStrForm(String tokenSignature) throws IOException {
return token != null ? token.encodeToUrlString() : null;
}
+ /**
+ * Create a delegation token object for the given token string and service,
+ * and add the token to the given UGI.
+ */
@Override
public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
+ ugi.addToken(delegationToken);
+ }
+
+ /**
+ * Add the given service to a delegation token string.
+ */
+ @Override
+ public String addServiceToToken(String tokenStr, String tokenService)
+ throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
+ return delegationToken.encodeToUrlString();
+ }
+
+ /**
+ * Create a new token from the given string form and set its service.
+ * @param tokenStr token in its URL-encoded string form
+ * @param tokenService service name to set on the token
+ * @return the decoded token with the service set
+ * @throws IOException if the token string cannot be decoded
+ */
+ private Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
+ throws IOException {
Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
delegationToken.decodeFromUrlString(tokenStr);
delegationToken.setService(new Text(tokenService));
- ugi.addToken(delegationToken);
+ return delegationToken;
}
@Override
@@ -498,6 +526,13 @@ public UserGroupInformation createProxyUser(String userName) throws IOException
}
@Override
+ public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi,
+ String ipAddress, Configuration conf) throws IOException {
+ ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, realUserUgi),
+ ipAddress, conf);
+ }
+
+ @Override
public boolean isSecurityEnabled() {
return UserGroupInformation.isSecurityEnabled();
}
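After this refactoring, setTokenStr() and addServiceToToken() share one
decode-modify-encode round trip over Hadoop's Token API. The same steps
spelled out inline for reference (standard
org.apache.hadoop.security.token.Token methods):

    // What createToken() followed by encodeToUrlString() amounts to:
    Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(tokenStr);          // parse the URL-safe string form
    token.setService(new Text(tokenService));     // stamp the target service, e.g. "host:port"
    String rewritten = token.encodeToUrlString(); // back to the wire format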
diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
index 29114f0..19d1fbf 100644
--- shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
+++ shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.thrift;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.io.Text;
@@ -83,5 +85,16 @@ public synchronized String getDelegationToken(String renewer) throws IOException
ident, this);
return t.encodeToUrlString();
}
+
+ public String getUserFromToken(String tokenStr) throws IOException {
+ Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
+ delegationToken.decodeFromUrlString(tokenStr);
+
+ ByteArrayInputStream buf = new ByteArrayInputStream(delegationToken.getIdentifier());
+ DataInputStream in = new DataInputStream(buf);
+ DelegationTokenIdentifier id = createIdentifier();
+ id.readFields(in);
+ return id.getUser().getShortUserName();
+ }
}
diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index dc89de1..e3f3e38 100644
--- shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -43,6 +43,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -401,6 +402,13 @@ public String run() throws IOException {
}
@Override
+ public String getDelegationTokenWithService(String owner, String renewer, String service)
+ throws IOException, InterruptedException {
+ String token = getDelegationToken(owner, renewer);
+ return ShimLoader.getHadoopShims().addServiceToToken(token, service);
+ }
+
+ @Override
public long renewDelegationToken(String tokenStrForm) throws IOException {
if (!authenticationMethod.get().equals(AuthenticationMethod.KERBEROS)) {
throw new AuthorizationException(
@@ -412,6 +420,11 @@ public long renewDelegationToken(String tokenStrForm) throws IOException {
}
@Override
+ public String getUserFromToken(String tokenStr) throws IOException {
+ return secretManager.getUserFromToken(tokenStr);
+ }
+
+ @Override
public void cancelDelegationToken(String tokenStrForm) throws IOException {
secretManager.cancelDelegationToken(tokenStrForm);
}
diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index e15ab4e..1f24a94 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -230,7 +230,7 @@ public URI getHarUri(URI original, URI base, URI originalBase)
* @return the string form of the token found
* @throws IOException
*/
- String getTokenStrForm(String tokenSignature) throws IOException;
+ public String getTokenStrForm(String tokenSignature) throws IOException;
/**
* Add a delegation token to the given ugi
@@ -239,9 +239,18 @@ public URI getHarUri(URI original, URI base, URI originalBase)
* @param tokenService
* @throws IOException
*/
- void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+ public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
throws IOException;
+ /**
+ * Add the given service to a string-form delegation token.
+ * @param tokenStr token in its URL-encoded string form
+ * @param tokenService service name to set on the token
+ * @return the modified token, re-encoded as a string
+ * @throws IOException
+ */
+ public String addServiceToToken(String tokenStr, String tokenService)
+ throws IOException;
enum JobTrackerState { INITIALIZING, RUNNING };
@@ -347,7 +356,14 @@ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration co
* @param userName
* @return
*/
- UserGroupInformation createProxyUser(String userName) throws IOException;
+ public UserGroupInformation createProxyUser(String userName) throws IOException;
+
+ /**
+ * Verify that the real user is allowed to act on behalf of the proxy user from the given host.
+ * @param proxyUser name of the user being impersonated
+ * @param realUserUgi UGI of the authenticated (real) user
+ * @param ipAddress IP address the request originated from
+ * @param conf configuration holding the proxy-user authorization rules
+ * @throws IOException if the proxy access is not authorized
+ */
+ public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi,
+ String ipAddress, Configuration conf) throws IOException;
/**
* The method sets to set the partition file has a different signature between
diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 03f4e51..e69373a 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -91,11 +91,14 @@ public abstract TTransport createClientTransport(
public abstract InetAddress getRemoteAddress();
public abstract void startDelegationTokenSecretManager(Configuration conf,
Object hmsHandler) throws IOException;
- public abstract String getRemoteUser();
public abstract String getDelegationToken(String owner, String renewer)
- throws IOException, InterruptedException;
+ throws IOException, InterruptedException;
+ public abstract String getDelegationTokenWithService(String owner, String renewer, String service)
+ throws IOException, InterruptedException;
+ public abstract String getRemoteUser();
public abstract long renewDelegationToken(String tokenStrForm) throws IOException;
public abstract void cancelDelegationToken(String tokenStrForm) throws IOException;
+ public abstract String getUserFromToken(String tokenStr) throws IOException;
}
}
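Taken together, the bridge now covers the full token lifecycle. A hedged
sketch against the abstract Server API above; server is assumed to be a
concrete HadoopThriftAuthBridge.Server running under Kerberos:

    // Issue a token for "owner" (renewable by "renewer") already bound to the
    // HiveServer2 service address, then resolve it back to its owner.
    String tokenStr = server.getDelegationTokenWithService("owner", "renewer", "hs2host:10000");
    String tokenOwner = server.getUserFromToken(tokenStr); // short user name from the identifier
    server.renewDelegationToken(tokenStr);
    server.cancelDelegationToken(tokenStr);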