diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 44e0255..ff950c9 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -1195,6 +1195,10 @@ private String getDefaultConnectionUrl(CommandLine cl) throws BeelineConfFileParseException {
if (password != null) {
jdbcConnectionParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD, password);
}
+ String auth = cl.getOptionValue("a");
+ if (auth != null) {
+ jdbcConnectionParams.getSessionVars().put(JdbcConnectionParams.AUTH_TYPE, auth);
+ }
mergedConnectionProperties =
HS2ConnectionFileUtils.mergeUserConnectionPropertiesAndBeelineSite(
userConnectionProperties, jdbcConnectionParams);
diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml
index 8db62a7..1910c4a 100644
--- a/hcatalog/webhcat/svr/pom.xml
+++ b/hcatalog/webhcat/svr/pom.xml
@@ -92,6 +92,12 @@
com.sun.jersey
jersey-core
${jersey.version}
+        <exclusions>
+          <exclusion>
+            <groupId>javax.ws.rs</groupId>
+            <artifactId>jsr311-api</artifactId>
+          </exclusion>
+        </exclusions>
com.sun.jersey
@@ -144,6 +150,11 @@
${slf4j.version}
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
org.apache.hadoop
hadoop-auth
${hadoop.version}
@@ -199,6 +210,11 @@
+    <dependency>
+      <groupId>javax.ws.rs</groupId>
+      <artifactId>javax.ws.rs-api</artifactId>
+      <version>${rs-api.version}</version>
+    </dependency>
org.apache.hive
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
index 00f335f..e694bab 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
@@ -163,6 +163,7 @@
* of escape/unescape methods in {@link org.apache.hadoop.util.StringUtils} in webhcat.
*/
public static final String HIVE_PROPS_NAME = "templeton.hive.properties";
+ public static final String HIVE_SERVER2_URL = "templeton.hive.hs2.url";
public static final String SQOOP_ARCHIVE_NAME = "templeton.sqoop.archive";
public static final String SQOOP_PATH_NAME = "templeton.sqoop.path";
public static final String SQOOP_HOME_PATH = "templeton.sqoop.home";
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
index 3f1968d..3f679ac 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
@@ -28,6 +28,7 @@
import org.apache.commons.exec.ExecuteException;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants;
import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
@@ -78,6 +79,11 @@ public EnqueueBean run(String user, Map<String, Object> userArgs,
args.add("-p");
args.add("default");
+ if (UserGroupInformation.isSecurityEnabled()) {
+ args.add("-a");
+ args.add("delegationToken");
+ }
+
//add mapreduce job tag placeholder
args.add("--hiveconf");
args.add(TempletonControllerJob.HIVE_QUERY_TAG_ARG_PLACEHOLDER);
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
index c503a7a..bbe5947 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
@@ -50,6 +50,7 @@
public class SecureProxySupport {
private Path tokenPath;
public static final String HCAT_SERVICE = "hcat";
+ public static final String HIVE_SERVICE = "hive";
private final boolean isEnabled;
private String user;
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index b1f4a6a..a776a0b 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -18,6 +18,10 @@
*/
package org.apache.hive.hcatalog.templeton.tool;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hive.hcatalog.templeton.SecureProxySupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -153,10 +157,10 @@ else if(TempletonUtils.isset(System.getenv(pathVarName))) {
env.put(pathVarName, paths);
}
}
- protected Process startJob(Configuration conf, String jobId, String user, String overrideClasspath,
+ protected Process startJob(Context context, String jobId, String user, String overrideClasspath,
LauncherDelegator.JobType jobType)
throws IOException, InterruptedException {
-
+ Configuration conf = context.getConfiguration();
copyLocal(COPY_NAME, conf);
String[] jarArgs = TempletonUtils.decodeArray(conf.get(JAR_ARGS_NAME));
@@ -174,6 +178,16 @@ protected Process startJob(Configuration conf, String jobId, String user, String
handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER, "mapreduce.job.credentials.binary");
handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER_TEZ, "tez.credentials.path");
if (jobType == LauncherDelegator.JobType.HIVE) {
+ Credentials cred = new Credentials();
+      Token<? extends TokenIdentifier> token = context.getCredentials().getToken(new
+          Text(SecureProxySupport.HIVE_SERVICE));
+ cred.addToken(new
+ Text(SecureProxySupport.HIVE_SERVICE), token);
+ File t = File.createTempFile("templeton", null);
+ Path tokenPath = new Path(t.toURI());
+ cred.writeTokenStorageFile(tokenPath, conf);
+ env.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
+ tokenPath.toUri().getPath());
replaceJobTag(jarArgsList, JobSubmissionConstants.HIVE_QUERY_TAG_ARG_PLACEHOLDER,
JobSubmissionConstants.HIVE_QUERY_TAG, jobId);
} else {
@@ -405,7 +419,7 @@ public void run(Context context) throws IOException, InterruptedException {
killLauncherChildJobs(conf, context.getJobID().toString());
// Start the job
- Process proc = startJob(conf,
+ Process proc = startJob(context,
context.getJobID().toString(),
conf.get("user.name"),
conf.get(OVERRIDE_CLASSPATH),
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index bbb33cc..834b54b 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -20,8 +20,12 @@
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
+import java.sql.DriverManager;
+import java.sql.SQLException;
import java.util.Arrays;
+import org.apache.hive.hcatalog.templeton.LauncherDelegator;
+import org.apache.hive.jdbc.HiveConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -142,32 +146,43 @@ public int run(String[] args) throws IOException, InterruptedException, ClassNot
      Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
job.getCredentials().addToken(new Text("mr token"), mrdt);
}
- String metastoreTokenStrForm = addHMSToken(job, user);
+ LauncherDelegator.JobType jobType = LauncherDelegator.JobType.valueOf(conf.get(JOB_TYPE));
+
+ String tokenStrForm = null;
+ if (jobType == LauncherDelegator.JobType.HIVE) {
+ tokenStrForm = addToken(job, user, SecureProxySupport.HIVE_SERVICE);
+ } else {
+ tokenStrForm = addToken(job, user, SecureProxySupport.HCAT_SERVICE);
+ }
job.submit();
JobID submittedJobId = job.getJobID();
- if(metastoreTokenStrForm != null) {
+ if(tokenStrForm != null) {
//so that it can be cancelled later from CompleteDelegator
DelegationTokenCache.getStringFormTokenCache().storeDelegationToken(
- submittedJobId.toString(), metastoreTokenStrForm);
- LOG.debug("Added metastore delegation token for jobId=" + submittedJobId.toString() +
+ submittedJobId.toString(), tokenStrForm);
+ LOG.debug("Added delegation token for jobId=" + submittedJobId.toString() +
" user=" + user);
}
return 0;
}
- private String addHMSToken(Job job, String user) throws IOException, InterruptedException,
+ private String addToken(Job job, String user, String type) throws IOException, InterruptedException,
TException {
if(!secureMetastoreAccess) {
return null;
}
    Token<DelegationTokenIdentifier> hiveToken =
        new Token<DelegationTokenIdentifier>();
- String metastoreTokenStrForm = buildHcatDelegationToken(user);
- hiveToken.decodeFromUrlString(metastoreTokenStrForm);
- job.getCredentials().addToken(new
- Text(SecureProxySupport.HCAT_SERVICE), hiveToken);
- return metastoreTokenStrForm;
+ String tokenStrForm;
+ if (type.equals(SecureProxySupport.HIVE_SERVICE)) {
+ tokenStrForm = buildHS2DelegationToken(user);
+ } else {
+ tokenStrForm = buildHcatDelegationToken(user);
+ }
+ hiveToken.decodeFromUrlString(tokenStrForm);
+ job.getCredentials().addToken(new Text(type), hiveToken);
+ return tokenStrForm;
}
private String buildHcatDelegationToken(String user) throws IOException, InterruptedException,
TException {
@@ -189,4 +204,37 @@ public String run() throws IOException, TException, InterruptedException {
}
});
}
+
+ private String buildHS2DelegationToken(String user) throws IOException, InterruptedException,
+ TException {
+ final HiveConf c = new HiveConf();
+ LOG.debug("Creating hiveserver2 delegation token for user " + user);
+ final UserGroupInformation ugi = UgiFactory.getUgi(user);
+ UserGroupInformation real = ugi.getRealUser();
+    return real.doAs(new PrivilegedExceptionAction<String>() {
+ @Override
+ public String run() throws IOException, TException, InterruptedException {
+ try {
+ Class.forName("org.apache.hive.jdbc.HiveDriver");
+ } catch (ClassNotFoundException e) {
+ throw new IOException(e);
+ }
+ String hs2Url = appConf.get(AppConfig.HIVE_SERVER2_URL);
+ final HiveConnection con;
+ try {
+ con = (HiveConnection) DriverManager.getConnection(hs2Url);
+ } catch (SQLException e) {
+ throw new IOException(e);
+ }
+        String token = ugi.doAs(new PrivilegedExceptionAction<String>() {
+ @Override
+ public String run() throws SQLException {
+ String u = ugi.getUserName();
+ return con.getDelegationToken(u,u);
+ }
+ });
+ return token;
+ }
+ });
+ }
}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 738450f..2064d3a 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -18,6 +18,12 @@
package org.apache.hive.jdbc;
+import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hive.service.rpc.thrift.TSetClientInfoResp;
import org.apache.hive.service.rpc.thrift.TSetClientInfoReq;
@@ -73,6 +79,7 @@
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import java.io.BufferedReader;
+import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
@@ -738,7 +745,23 @@ private String getClientDelegationToken(Map<String, String> jdbcConnConf)
if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
// check delegation token in job conf if any
try {
- tokenStr = SessionUtils.getTokenStrForm(HiveAuthConstants.HS2_CLIENT_TOKEN);
+ if (System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION) != null) {
+ try {
+ Credentials cred = new Credentials();
+ DataInputStream dis = new DataInputStream(new FileInputStream(System.getenv(UserGroupInformation
+ .HADOOP_TOKEN_FILE_LOCATION)));
+ cred.readTokenStorageStream(dis);
+ dis.close();
+          Token<? extends TokenIdentifier> token = cred.getToken(new Text("hive"));
+ tokenStr = token.encodeToUrlString();
+ } catch (IOException e) {
+ LOG.warn("Cannot get token from environment variable $HADOOP_TOKEN_FILE_LOCATION=" +
+ System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION));
+ }
+ }
+ if (tokenStr == null) {
+ tokenStr = SessionUtils.getTokenStrForm(HiveAuthConstants.HS2_CLIENT_TOKEN);
+ }
} catch (IOException e) {
throw new SQLException("Error reading token ", e);
}
@@ -829,6 +852,7 @@ private boolean isSslConnection() {
private boolean isKerberosAuthMode() {
return !JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))
+ && !JdbcConnectionParams.AUTH_TOKEN.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))
&& sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL);
}
diff --git a/packaging/src/main/assembly/bin.xml b/packaging/src/main/assembly/bin.xml
index 2dd9260..f0f3809 100644
--- a/packaging/src/main/assembly/bin.xml
+++ b/packaging/src/main/assembly/bin.xml
@@ -91,9 +91,12 @@
true
org.apache.hadoop:*
+        <exclude>org.apache.hive:hive-jdbc:jar:standalone</exclude>
+        <exclude>org.apache.httpcomponents:*</exclude>
org.apache.hive.hcatalog:hive-webhcat:*
+        <include>org.apache.hive:hive-jdbc:jar</include>
diff --git a/pom.xml b/pom.xml
index ed58b07..3dee259 100644
--- a/pom.xml
+++ b/pom.xml
@@ -214,6 +214,7 @@
3.0.0
0.6.0
2.2.4
+    <rs-api.version>2.0.1</rs-api.version>