diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 11c3121..69e38f0 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -725,6 +725,7 @@
HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+ HIVE_SERVER2_THRIFT_SASL_QOP("hive.server2.thrift.sasl.qop", "auth"),
// HiveServer2 auth configuration
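Note: HIVE_SERVER2_THRIFT_SASL_QOP defaults to "auth". A minimal sketch of reading it through the standard HiveConf accessor (only the varname comes from this patch; the wrapper class is illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class QopConfExample {
      public static void main(String[] args) {
        // Resolves hive.server2.thrift.sasl.qop; "auth" unless overridden in hive-site.xml.
        HiveConf conf = new HiveConf();
        System.out.println(conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
      }
    }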
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index 603b475..a291d7d 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1917,6 +1917,11 @@
+<property>
+  <name>hive.server2.thrift.sasl.qop</name>
+  <value>auth</value>
+  <description>Sasl QOP value; one of 'auth', 'auth-int' and 'auth-conf'</description>
+</property>
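On the client side, the QOP is requested per session through a JDBC URL parameter parsed in HiveConnection below. A hedged sketch, assuming the session-conf key behind HIVE_AUTH_QOP is "sasl.qop" and using placeholder host/principal values:

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class QopUrlExample {
      public static void main(String[] args) throws Exception {
        // 'sasl.qop=auth-conf' asks the Kerberos SASL transport for authentication,
        // integrity, and confidentiality; a principal is required on this code path.
        Connection con = DriverManager.getConnection(
            "jdbc:hive2://host:10000/default;principal=hive/_HOST@EXAMPLE.COM;sasl.qop=auth-conf");
        con.close();
      }
    }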
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 00f4351..4644fec 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -33,6 +33,7 @@
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
+import java.util.HashMap;
import java.util.concurrent.Executor;
import java.util.LinkedList;
import java.util.List;
@@ -40,11 +41,13 @@
import java.util.Map.Entry;
import java.util.Properties;
+import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
+import org.apache.hive.service.auth.SaslQOP;
import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TCloseSessionReq;
@@ -132,8 +135,19 @@ private void openTransport(String uri, String host, int port, Map<String, String> sessConf) throws SQLException {
+ Map<String, String> saslProps = new HashMap<String, String>();
+ SaslQOP saslQOP = SaslQOP.AUTH;
+ if(sessConf.containsKey(HIVE_AUTH_QOP)) {
+ try {
+ saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_QOP));
+ } catch (IllegalArgumentException e) {
+ throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
+ }
+ }
+ saslProps.put(Sasl.QOP, saslQOP.toString());
+ saslProps.put(Sasl.SERVER_AUTH, "true");
transport = KerberosSaslHelper.getKerberosTransport(
- sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+ sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
} else {
String userName = sessConf.get(HIVE_AUTH_USER);
if ((userName == null) || userName.isEmpty()) {
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 72eac98..050919a 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -4375,7 +4375,8 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL));
// start delegation token manager
saslServer.startDelegationTokenSecretManager(conf);
- transFactory = saslServer.createTransportFactory();
+ transFactory = saslServer.createTransportFactory(
+ MetaStoreUtils.getMetaStoreSaslProperties(conf));
processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor(
newHMSHandler("new db based metaserver", conf)));
LOG.info("Starting DB backed MetaStore Server in Secure Mode");
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index cef50f4..914a4ed 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -71,6 +71,7 @@
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.thrift.TException;
@@ -258,17 +259,17 @@ private void open() throws MetaException {
String tokenSig = conf.get("hive.metastore.token.signature");
// tokenSig could be null
tokenStrForm = shim.getTokenStrForm(tokenSig);
-
if(tokenStrForm != null) {
// authenticate using delegation tokens via the "DIGEST" mechanism
transport = authBridge.createClientTransport(null, store.getHost(),
- "DIGEST", tokenStrForm, transport);
+ "DIGEST", tokenStrForm, transport,
+ MetaStoreUtils.getMetaStoreSaslProperties(conf));
} else {
String principalConfig =
conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL);
transport = authBridge.createClientTransport(
principalConfig, store.getHost(), "KERBEROS", null,
- transport);
+ transport, MetaStoreUtils.getMetaStoreSaslProperties(conf));
}
} catch (IOException ioe) {
LOG.error("Couldn't create client transport", ioe);
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 88151a1..9f3a4b9 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -65,6 +65,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.security.SaslRpcServer;
public class MetaStoreUtils {
@@ -1264,6 +1265,29 @@ public static boolean compareFieldColumns(List<FieldSchema> schema1, List<FieldSchema> schema2) {
+ /**
+ * Read and return the metastore SASL configuration. Currently it uses the default
+ * Hadoop SASL configuration and can be configured using "hadoop.rpc.protection"
+ * @param conf
+ * @return The SASL configuration
+ */
+ public static Map<String, String> getMetaStoreSaslProperties(HiveConf conf) {
+ // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration
+ return getHadoopSaslProperties(conf);
+ }
+
+ /**
+ * Read and return Hadoop SASL configuration which can be configured using
+ * "hadoop.rpc.protection"
+ * @param conf
+ * @return Hadoop SASL configuration
+ */
+ public static Map<String, String> getHadoopSaslProperties(HiveConf conf) {
+ // Initialize the SaslRpcServer to ensure QOP parameters are read from conf
+ SaslRpcServer.init(conf);
+ return SaslRpcServer.SASL_PROPS;
+ }
+
private static String getPartitionValWithInvalidCharacter(List<String> partVals,
Pattern partitionValidationPattern) {
if (partitionValidationPattern == null) {
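For context: SaslRpcServer populates SASL_PROPS from hadoop.rpc.protection, mapping "authentication" to QOP "auth", "integrity" to "auth-int", and "privacy" to "auth-conf". A small sketch of consuming the helper added above:

    import java.util.Map;
    import javax.security.sasl.Sasl;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.MetaStoreUtils;

    public class MetaStoreQopExample {
      public static void main(String[] args) {
        // Whatever core-site.xml sets for hadoop.rpc.protection surfaces here as a
        // javax.security.sasl QOP string ("auth", "auth-int", or "auth-conf").
        HiveConf conf = new HiveConf();
        Map<String, String> props = MetaStoreUtils.getMetaStoreSaslProperties(conf);
        System.out.println(props.get(Sasl.QOP));
      }
    }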
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 1809e1b..da0269a 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -20,17 +20,26 @@
import java.io.IOException;
import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
public class HiveAuthFactory {
+ private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
public static enum AuthTypes {
NOSASL("NOSASL"),
@@ -71,13 +80,31 @@ public HiveAuthFactory() throws TTransportException {
}
}
+ public Map<String, String> getSaslProperties() {
+ Map<String, String> saslProps = new HashMap<String, String>();
+ SaslQOP saslQOP =
+ SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+ // hadoop.rpc.protection being set to a higher level than hive.server2.thrift.sasl.qop
+ // does not make sense in most situations. Log warning message in such cases.
+ Map<String, String> hadoopSaslProps = MetaStoreUtils.getHadoopSaslProperties(conf);
+ SaslQOP hadoopSaslQOP = SaslQOP.fromString(hadoopSaslProps.get(Sasl.QOP));
+ if(hadoopSaslQOP.ordinal() > saslQOP.ordinal()) {
+ LOG.warn(MessageFormat.format("\"hadoop.rpc.protection\" is set to a higher security level " +
+ "{0} than {1}, which is set to {2}", hadoopSaslQOP.toString(),
+ ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname, saslQOP.toString()));
+ }
+ saslProps.put(Sasl.QOP, saslQOP.toString());
+ saslProps.put(Sasl.SERVER_AUTH, "true");
+ return saslProps;
+ }
+
public TTransportFactory getAuthTransFactory() throws LoginException {
TTransportFactory transportFactory;
if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
try {
- transportFactory = saslServer.createTransportFactory();
+ transportFactory = saslServer.createTransportFactory(getSaslProperties());
} catch (TTransportException e) {
throw new LoginException(e.getMessage());
}
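The warning above leans on SaslQOP's declaration order (AUTH, AUTH_INT, AUTH_CONF in increasing strength), so ordinal() doubles as a strength comparison. A minimal sketch with hard-coded stand-ins for the two settings:

    import org.apache.hive.service.auth.SaslQOP;

    public class QopCompareExample {
      public static void main(String[] args) {
        SaslQOP server = SaslQOP.fromString("auth");      // hive.server2.thrift.sasl.qop
        SaslQOP hadoop = SaslQOP.fromString("auth-conf"); // derived from hadoop.rpc.protection
        if (hadoop.ordinal() > server.ordinal()) {
          System.out.println("hadoop.rpc.protection is stronger than the HiveServer2 QOP");
        }
      }
    }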
diff --git a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 379dafb..519556c 100644
--- a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -18,6 +18,7 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.util.Map;
import javax.security.sasl.SaslException;
@@ -56,7 +57,7 @@ public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
}
public static TTransport getKerberosTransport(String principal, String host,
- final TTransport underlyingTransport) throws SaslException {
+ final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
try {
final String names[] = principal.split("[/@]");
if (names.length != 3) {
@@ -67,7 +68,7 @@ public static TTransport getKerberosTransport(String principal, String host,
HadoopThriftAuthBridge.Client authBridge =
ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
return authBridge.createClientTransport(principal, host,
- "KERBEROS", null, underlyingTransport);
+ "KERBEROS", null, underlyingTransport, saslProps);
} catch (IOException e) {
throw new SaslException("Failed to open client transport", e);
}
diff --git a/service/src/java/org/apache/hive/service/auth/SaslQOP.java b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
new file mode 100644
index 0000000..0b2e7a2
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Possible values of SASL quality-of-protection value.
+ */
+public enum SaslQOP {
+ AUTH("auth"), // Authentication only.
+ AUTH_INT("auth-int"), // Authentication and integrity checking by using signatures.
+ AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
+ // by using signatures and encryption.
+
+ public final String saslQop;
+
+ private static final Map<String, SaslQOP> strToEnum
+ = new HashMap<String, SaslQOP>();
+ static {
+ for (SaslQOP saslQop : values())
+ strToEnum.put(saslQop.toString(), saslQop);
+ }
+
+ private SaslQOP(final String saslQop) {
+ this.saslQop = saslQop;
+ }
+
+ public String toString() {
+ return saslQop;
+ }
+
+ public static SaslQOP fromString(String str) {
+ if(str != null) {
+ str = str.toLowerCase();
+ }
+ SaslQOP saslQOP = strToEnum.get(str);
+ if(saslQOP == null) {
+ throw new IllegalArgumentException("Unknown auth type: " + str + " Allowed values are: "
+ + strToEnum.keySet());
+ }
+ return saslQOP;
+ }
+}
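Usage sketch for the new enum as defined above; fromString() lower-cases its input, so mixed-case values resolve, and unknown values raise IllegalArgumentException (which HiveConnection converts to a SQLException):

    import org.apache.hive.service.auth.SaslQOP;

    public class SaslQopUsage {
      public static void main(String[] args) {
        System.out.println(SaslQOP.fromString("AUTH-CONF")); // prints "auth-conf"
        try {
          SaslQOP.fromString("bogus");
        } catch (IllegalArgumentException e) {
          System.out.println(e.getMessage()); // lists the allowed values
        }
      }
    }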
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index 777226f..967bdb8 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -22,6 +22,7 @@
import java.net.Socket;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
+import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -92,20 +93,11 @@ public Server createServer(String keytabFile, String principalConf) throws TTran
}
public static class Client extends HadoopThriftAuthBridge.Client {
- /**
- * Create a client-side SASL transport that wraps an underlying transport.
- *
- * @param method The authentication method to use. Currently only KERBEROS is
- * supported.
- * @param serverPrincipal The Kerberos principal of the target server.
- * @param underlyingTransport The underlying transport mechanism, usually a TSocket.
- */
-
@Override
public TTransport createClientTransport(
- String principalConfig, String host,
- String methodStr, String tokenStrForm, TTransport underlyingTransport)
- throws IOException {
+ String principalConfig, String host,
+ String methodStr, String tokenStrForm, TTransport underlyingTransport,
+ Map<String, String> saslProps) throws IOException {
AuthMethod method = AuthMethod.valueOf(AuthMethod.class, methodStr);
TTransport saslTransport = null;
@@ -114,11 +106,11 @@ public TTransport createClientTransport(
Token<DelegationTokenIdentifier> t= new Token<DelegationTokenIdentifier>();
t.decodeFromUrlString(tokenStrForm);
saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(t),
- underlyingTransport);
+ method.getMechanismName(),
+ null,
+ null, SaslRpcServer.SASL_DEFAULT_REALM,
+ saslProps, new SaslClientCallbackHandler(t),
+ underlyingTransport);
return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
case KERBEROS:
@@ -126,23 +118,23 @@ public TTransport createClientTransport(
String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
if (names.length != 3) {
throw new IOException(
- "Kerberos principal name does NOT have the expected hostname part: "
- + serverPrincipal);
+ "Kerberos principal name does NOT have the expected hostname part: "
+ + serverPrincipal);
}
try {
saslTransport = new TSaslClientTransport(
- method.getMechanismName(),
- null,
- names[0], names[1],
- SaslRpcServer.SASL_PROPS, null,
- underlyingTransport);
+ method.getMechanismName(),
+ null,
+ names[0], names[1],
+ saslProps, null,
+ underlyingTransport);
return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
} catch (SaslException se) {
throw new IOException("Could not instantiate SASL transport", se);
}
default:
- throw new IOException("Unsupported authentication method: " + method);
+ throw new IOException("Unsupported authentication method: " + method);
}
}
private static class SaslClientCallbackHandler implements CallbackHandler {
@@ -273,10 +265,10 @@ protected Server(String keytabFile, String principalConf)
* can be passed as both the input and output transport factory when
* instantiating a TThreadPoolServer, for example.
*
+ * @param saslProps Map of SASL properties
*/
@Override
- public TTransportFactory createTransportFactory() throws TTransportException
- {
+ public TTransportFactory createTransportFactory(Map<String, String> saslProps) throws TTransportException {
// Parse out the kerberos principal, host, realm.
String kerberosName = realUgi.getUserName();
final String names[] = SaslRpcServer.splitKerberosName(kerberosName);
@@ -286,13 +278,13 @@ public TTransportFactory createTransportFactory() throws TTransportException
TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory();
transFactory.addServerDefinition(
- AuthMethod.KERBEROS.getMechanismName(),
- names[0], names[1], // two parts of kerberos principal
- SaslRpcServer.SASL_PROPS,
- new SaslRpcServer.SaslGssCallbackHandler());
+ AuthMethod.KERBEROS.getMechanismName(),
+ names[0], names[1], // two parts of kerberos principal
+ saslProps,
+ new SaslRpcServer.SaslGssCallbackHandler());
transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
- null, SaslRpcServer.SASL_DEFAULT_REALM,
- SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(secretManager));
+ null, SaslRpcServer.SASL_DEFAULT_REALM,
+ saslProps, new SaslDigestCallbackHandler(secretManager));
return new TUGIAssumingTransportFactory(transFactory, realUgi);
}
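Independent of Hive, the map threaded through TSaslClientTransport and TSaslServerTransport.Factory above is the standard javax.security.sasl properties map; a minimal sketch of its shape as this patch builds it:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.sasl.Sasl;

    public class SaslPropsShape {
      public static void main(String[] args) {
        // Keys come from javax.security.sasl.Sasl; values are plain strings.
        Map<String, String> saslProps = new HashMap<String, String>();
        saslProps.put(Sasl.QOP, "auth-conf");    // replaces the former SaslRpcServer.SASL_PROPS
        saslProps.put(Sasl.SERVER_AUTH, "true"); // require mutual authentication
        System.out.println(saslProps);
      }
    }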
diff --git a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
index 172e031..b518963 100644
--- a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
+++ b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
@@ -29,6 +29,7 @@
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
+import java.util.Map;
import junit.framework.TestCase;
@@ -72,13 +73,13 @@ public Server() throws TTransportException {
super();
}
@Override
- public TTransportFactory createTransportFactory()
+ public TTransportFactory createTransportFactory(Map<String, String> saslProps)
throws TTransportException {
TSaslServerTransport.Factory transFactory =
new TSaslServerTransport.Factory();
transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
null, SaslRpcServer.SASL_DEFAULT_REALM,
- SaslRpcServer.SASL_PROPS,
+ saslProps,
new SaslDigestCallbackHandler(secretManager));
return new TUGIAssumingTransportFactory(transFactory, realUgi);
diff --git a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 9b0ec0a..b8758ea 100644
--- a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -16,17 +16,18 @@
* limitations under the License.
*/
- package org.apache.hadoop.hive.thrift;
+package org.apache.hadoop.hive.thrift;
- import java.io.IOException;
+import java.io.IOException;
import java.net.InetAddress;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.thrift.TProcessor;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
- /**
+/**
* This class is only overridden by the secure hadoop shim. It allows
* the Thrift SASL support to bridge to Hadoop's UserGroupInformation
* & DelegationToken infrastructure.
@@ -50,27 +51,28 @@ public Server createServer(String keytabFile, String principalConf)
public static abstract class Client {
- /**
- *
- * @param principalConfig In the case of Kerberos authentication this will
- * be the kerberos principal name, for DIGEST-MD5 (delegation token) based
- * authentication this will be null
- * @param host The metastore server host name
- * @param methodStr "KERBEROS" or "DIGEST"
- * @param tokenStrForm This is url encoded string form of
- * org.apache.hadoop.security.token.
- * @param underlyingTransport the underlying transport
- * @return the transport
- * @throws IOException
- */
+ /**
+ *
+ * @param principalConfig In the case of Kerberos authentication this will
+ * be the kerberos principal name, for DIGEST-MD5 (delegation token) based
+ * authentication this will be null
+ * @param host The metastore server host name
+ * @param methodStr "KERBEROS" or "DIGEST"
+ * @param tokenStrForm This is url encoded string form of
+ * org.apache.hadoop.security.token.
+ * @param underlyingTransport the underlying transport
+ * @return the transport
+ * @throws IOException
+ */
public abstract TTransport createClientTransport(
- String principalConfig, String host,
- String methodStr,String tokenStrForm, TTransport underlyingTransport)
- throws IOException;
+ String principalConfig, String host,
+ String methodStr, String tokenStrForm, TTransport underlyingTransport,
+ Map<String, String> saslProps)
+ throws IOException;
}
public static abstract class Server {
- public abstract TTransportFactory createTransportFactory() throws TTransportException;
+ public abstract TTransportFactory createTransportFactory(Map<String, String> saslProps) throws TTransportException;
public abstract TProcessor wrapProcessor(TProcessor processor);
public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
public abstract InetAddress getRemoteAddress();