diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 11c3121..1d06c5b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -725,6 +725,7 @@
HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+ HIVE_SERVER2_THRIFT_RPC_PROTECTION("hive.server2.thrift.rpc.protection", "auth"),
// HiveServer2 auth configuration
diff --git a/data/conf/hive-site.xml b/data/conf/hive-site.xml
index 4e6ff16..8fe1e9f 100644
--- a/data/conf/hive-site.xml
+++ b/data/conf/hive-site.xml
@@ -194,4 +194,9 @@
The default SerDe hive will use for the rcfile format
+<property>
+  <name>hive.server2.thrift.rpc.protection</name>
+  <value>auth</value>
+  <description>Sasl QOP value; one of 'auth', 'auth-int' and 'auth-conf'</description>
+</property>
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 00f4351..1eeef7c 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -33,6 +33,7 @@
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
+import java.util.HashMap;
import java.util.concurrent.Executor;
import java.util.LinkedList;
import java.util.List;
@@ -40,11 +41,13 @@
import java.util.Map.Entry;
import java.util.Properties;
+import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
+import org.apache.hive.service.auth.SaslQOP;
import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TCloseSessionReq;
@@ -132,8 +135,19 @@ private void openTransport(String uri, String host, int port, Map<String, String> sessConf)
+ Map<String, String> saslProps = new HashMap<String, String>();
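+ // Pick up the requested SASL QOP from the connection settings; default is authentication only.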
+ SaslQOP saslQOP = SaslQOP.AUTH;
+ if(sessConf.containsKey(HIVE_AUTH_TYPE)) {
+ try {
+ saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_TYPE));
+ } catch (IllegalArgumentException e) {
+ throw new SQLException("Invalid " + HIVE_AUTH_TYPE + " parameter. " + e.getMessage(), "42000");
+ }
+ }
+ saslProps.put(Sasl.QOP, saslQOP.toString());
+ saslProps.put(Sasl.SERVER_AUTH, "true");
transport = KerberosSaslHelper.getKerberosTransport(
- sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+ sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
} else {
String userName = sessConf.get(HIVE_AUTH_USER);
if ((userName == null) || userName.isEmpty()) {
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 1809e1b..f42d333 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -30,6 +31,9 @@
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
+import java.util.HashMap;
+import java.util.Map;
+
public class HiveAuthFactory {
public static enum AuthTypes {
@@ -71,13 +75,22 @@ public HiveAuthFactory() throws TTransportException {
}
}
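+ /**
+ * Returns the SASL properties for the HiveServer2 thrift transport: the QOP
+ * configured via hive.server2.thrift.rpc.protection plus server authentication.
+ */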
+ public Map<String, String> getSaslProperties() {
+ Map<String, String> saslProps = new HashMap<String, String>();
+ SaslQOP saslQOP =
+ SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_RPC_PROTECTION));
+ saslProps.put(Sasl.QOP, saslQOP.toString());
+ saslProps.put(Sasl.SERVER_AUTH, "true");
+ return saslProps;
+ }
+
public TTransportFactory getAuthTransFactory() throws LoginException {
TTransportFactory transportFactory;
if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
try {
- transportFactory = saslServer.createTransportFactory();
+ transportFactory = saslServer.createTransportFactory(getSaslProperties());
} catch (TTransportException e) {
throw new LoginException(e.getMessage());
}
diff --git a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 379dafb..519556c 100644
--- a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -18,6 +18,7 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.util.Map;
import javax.security.sasl.SaslException;
@@ -56,7 +57,7 @@ public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
}
public static TTransport getKerberosTransport(String principal, String host,
- final TTransport underlyingTransport) throws SaslException {
+ final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
try {
final String names[] = principal.split("[/@]");
if (names.length != 3) {
@@ -67,7 +68,7 @@ public static TTransport getKerberosTransport(String principal, String host,
HadoopThriftAuthBridge.Client authBridge =
ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
return authBridge.createClientTransport(principal, host,
- "KERBEROS", null, underlyingTransport);
+ "KERBEROS", null, underlyingTransport, saslProps);
} catch (IOException e) {
throw new SaslException("Failed to open client transport", e);
}
diff --git a/service/src/java/org/apache/hive/service/auth/SaslQOP.java b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
new file mode 100644
index 0000000..0b2e7a2
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Possible values of the SASL quality-of-protection (QOP) setting.
+ */
+public enum SaslQOP {
+ AUTH("auth"), // Authentication only.
+ AUTH_INT("auth-int"), // Authentication and integrity checking by using signatures.
+ AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
+ // by using signatures and encryption.
+
+ public final String saslQop;
+
+ private static final Map<String, SaslQOP> strToEnum
+ = new HashMap<String, SaslQOP>();
+ static {
+ for (SaslQOP saslQOP : values())
+ strToEnum.put(saslQOP.toString(), saslQOP);
+ }
+
+ private SaslQOP(final String saslQop) {
+ this.saslQop = saslQop;
+ }
+
+ public String toString() {
+ return saslQop;
+ }
+
+ public static SaslQOP fromString(String str) {
+ if(str != null) {
+ str = str.toLowerCase();
+ }
+ SaslQOP saslQOP = strToEnum.get(str);
+ if(saslQOP == null) {
+ throw new IllegalArgumentException("Unknown auth type: " + str + " Allowed values are: "
+ + strToEnum.keySet());
+ }
+ return saslQOP;
+ }
+}
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index 777226f..0930334 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -22,6 +22,7 @@
import java.net.Socket;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
+import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -106,6 +107,15 @@ public TTransport createClientTransport(
String principalConfig, String host,
String methodStr, String tokenStrForm, TTransport underlyingTransport)
throws IOException {
+ return createClientTransport(principalConfig, host, methodStr, tokenStrForm,
+ underlyingTransport, SaslRpcServer.SASL_PROPS);
+ }
+
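+ /**
+ * Variant of createClientTransport that takes explicit SASL properties
+ * (for example a javax.security.sasl.Sasl.QOP setting) instead of the
+ * Hadoop default SaslRpcServer.SASL_PROPS.
+ */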
+ @Override
+ public TTransport createClientTransport(
+ String principalConfig, String host,
+ String methodStr, String tokenStrForm, TTransport underlyingTransport,
+ Map<String, String> saslProps) throws IOException {
AuthMethod method = AuthMethod.valueOf(AuthMethod.class, methodStr);
TTransport saslTransport = null;
@@ -114,11 +124,11 @@ public TTransport createClientTransport(
Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
t.decodeFromUrlString(tokenStrForm);
saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(t),
- underlyingTransport);
+ method.getMechanismName(),
+ null,
+ null, SaslRpcServer.SASL_DEFAULT_REALM,
+ saslProps, new SaslClientCallbackHandler(t),
+ underlyingTransport);
return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
case KERBEROS:
@@ -126,23 +136,23 @@ public TTransport createClientTransport(
String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
if (names.length != 3) {
throw new IOException(
- "Kerberos principal name does NOT have the expected hostname part: "
- + serverPrincipal);
+ "Kerberos principal name does NOT have the expected hostname part: "
+ + serverPrincipal);
}
try {
saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- names[0], names[1],
- SaslRpcServer.SASL_PROPS, null,
- underlyingTransport);
+ method.getMechanismName(),
+ null,
+ names[0], names[1],
+ saslProps, null,
+ underlyingTransport);
return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
} catch (SaslException se) {
throw new IOException("Could not instantiate SASL transport", se);
}
default:
- throw new IOException("Unsupported authentication method: " + method);
+ throw new IOException("Unsupported authentication method: " + method);
}
}
private static class SaslClientCallbackHandler implements CallbackHandler {
@@ -275,8 +285,20 @@ protected Server(String keytabFile, String principalConf)
*
*/
@Override
- public TTransportFactory createTransportFactory() throws TTransportException
- {
+ public TTransportFactory createTransportFactory() throws TTransportException {
+ return createTransportFactory(SaslRpcServer.SASL_PROPS);
+ }
+
+ /**
+ * Create a TTransportFactory that, upon connection of a client socket,
+ * negotiates a Kerberized SASL transport. The resulting TTransportFactory
+ * can be passed as both the input and output transport factory when
+ * instantiating a TThreadPoolServer, for example.
+ *
+ * @param saslProps Map of SASL properties
+ */
+ @Override
+ public TTransportFactory createTransportFactory(Map<String, String> saslProps) throws TTransportException {
// Parse out the kerberos principal, host, realm.
String kerberosName = realUgi.getUserName();
final String names[] = SaslRpcServer.splitKerberosName(kerberosName);
@@ -286,13 +308,13 @@ public TTransportFactory createTransportFactory() throws TTransportException
TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory();
transFactory.addServerDefinition(
- AuthMethod.KERBEROS.getMechanismName(),
- names[0], names[1], // two parts of kerberos principal
- SaslRpcServer.SASL_PROPS,
- new SaslRpcServer.SaslGssCallbackHandler());
+ AuthMethod.KERBEROS.getMechanismName(),
+ names[0], names[1], // two parts of kerberos principal
+ saslProps,
+ new SaslRpcServer.SaslGssCallbackHandler());
transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(secretManager));
+ null, SaslRpcServer.SASL_DEFAULT_REALM,
+ saslProps, new SaslDigestCallbackHandler(secretManager));
return new TUGIAssumingTransportFactory(transFactory, realUgi);
}
diff --git a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 9b0ec0a..ad91e3b 100644
--- a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -16,17 +16,18 @@
* limitations under the License.
*/
- package org.apache.hadoop.hive.thrift;
+package org.apache.hadoop.hive.thrift;
- import java.io.IOException;
+import java.io.IOException;
import java.net.InetAddress;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.thrift.TProcessor;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
- /**
+/**
* This class is only overridden by the secure hadoop shim. It allows
* the Thrift SASL support to bridge to Hadoop's UserGroupInformation
* & DelegationToken infrastructure.
@@ -67,10 +68,29 @@ public abstract TTransport createClientTransport(
String principalConfig, String host,
String methodStr,String tokenStrForm, TTransport underlyingTransport)
throws IOException;
+ /**
+ *
+ * @param principalConfig In the case of Kerberos authentication this will
+ * be the kerberos principal name, for DIGEST-MD5 (delegation token) based
+ * authentication this will be null
+ * @param host The metastore server host name
+ * @param methodStr "KERBEROS" or "DIGEST"
+ * @param tokenStrForm This is url encoded string form of
+ * org.apache.hadoop.security.token.
+ * @param underlyingTransport the underlying transport
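+ * @param saslProps SASL properties (such as Sasl.QOP) to create the transport with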
+ * @return the transport
+ * @throws IOException
+ */
+ public abstract TTransport createClientTransport(
+ String principalConfig, String host,
+ String methodStr, String tokenStrForm, TTransport underlyingTransport,
+ Map<String, String> saslProps)
+ throws IOException;
}
public static abstract class Server {
public abstract TTransportFactory createTransportFactory() throws TTransportException;
+ public abstract TTransportFactory createTransportFactory(Map<String, String> saslProps) throws TTransportException;
public abstract TProcessor wrapProcessor(TProcessor processor);
public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
public abstract InetAddress getRemoteAddress();