diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 555343e..4cca3e1 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -727,6 +727,7 @@
     HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
     HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+    HIVE_SERVER2_THRIFT_SASL_QOP("hive.server2.thrift.sasl.qop", "auth"),

     // HiveServer2 auth configuration
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index f01e715..d8737e3 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1919,6 +1919,11 @@

+<property>
+  <name>hive.server2.thrift.sasl.qop</name>
+  <value>auth</value>
+  <description>Sasl QOP value; one of 'auth', 'auth-int' and 'auth-conf'</description>
+</property>

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 00f4351..9fbc8ad 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -33,6 +33,7 @@
 import java.sql.Savepoint;
 import java.sql.Statement;
 import java.sql.Struct;
+import java.util.HashMap;
 import java.util.concurrent.Executor;
 import java.util.LinkedList;
 import java.util.List;
@@ -40,11 +41,13 @@
 import java.util.Map.Entry;
 import java.util.Properties;

+import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;

 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
+import org.apache.hive.service.auth.SaslQOP;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCloseSessionReq;
@@ -65,6 +68,7 @@
  */
 public class HiveConnection implements java.sql.Connection {
   private static final String HIVE_AUTH_TYPE= "auth";
+  private static final String HIVE_AUTH_QOP = "sasl.qop";
   private static final String HIVE_AUTH_SIMPLE = "noSasl";
   private static final String HIVE_AUTH_USER = "user";
   private static final String HIVE_AUTH_PRINCIPAL = "principal";
@@ -132,8 +136,19 @@ private void openTransport(String uri, String host, int port, Map<String, String> sessConf)
+        Map<String, String> saslProps = new HashMap<String, String>();
+        SaslQOP saslQOP = SaslQOP.AUTH;
+        if (sessConf.containsKey(HIVE_AUTH_QOP)) {
+          try {
+            saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_QOP));
+          } catch (IllegalArgumentException e) {
+            throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " +
+                e.getMessage(), "42000", e);
+          }
+        }
+        saslProps.put(Sasl.QOP, saslQOP.toString());
+        saslProps.put(Sasl.SERVER_AUTH, "true");
         transport = KerberosSaslHelper.getKerberosTransport(
-            sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+            sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
       } else {
         String userName = sessConf.get(HIVE_AUTH_USER);
         if ((userName == null) || userName.isEmpty()) {
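With the HiveConnection change above, a JDBC client selects the QOP through the sasl.qop session variable in the connection URL; an invalid value is rejected with SQLSTATE 42000. A minimal usage sketch (the host and principal below are placeholders, not part of the patch):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class QopJdbcExample {
      public static void main(String[] args) throws Exception {
        // "auth-conf" asks the SASL layer to also encrypt all Thrift traffic;
        // hs2.example.com and the principal are hypothetical.
        String url = "jdbc:hive2://hs2.example.com:10000/default;"
            + "principal=hive/hs2.example.com@EXAMPLE.COM;sasl.qop=auth-conf";
        Connection conn = DriverManager.getConnection(url);
        conn.close();
      }
    }
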
" + e.getMessage(), "42000", e); + } + } + saslProps.put(Sasl.QOP, saslQOP.toString()); + saslProps.put(Sasl.SERVER_AUTH, "true"); transport = KerberosSaslHelper.getKerberosTransport( - sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport); + sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps); } else { String userName = sessConf.get(HIVE_AUTH_USER); if ((userName == null) || userName.isEmpty()) { diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index cde58c2..591f7d5 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -4376,7 +4376,8 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge, // start delegation token manager HMSHandler hmsHandler = new HMSHandler("new db based metaserver", conf); saslServer.startDelegationTokenSecretManager(conf, hmsHandler); - transFactory = saslServer.createTransportFactory(); + transFactory = saslServer.createTransportFactory( + MetaStoreUtils.getMetaStoreSaslProperties(conf)); processor = saslServer.wrapProcessor( new ThriftHiveMetastore.Processor(hmsHandler)); LOG.info("Starting DB backed MetaStore Server in Secure Mode"); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index cef50f4..e165b2d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -258,17 +258,17 @@ private void open() throws MetaException { String tokenSig = conf.get("hive.metastore.token.signature"); // tokenSig could be null tokenStrForm = shim.getTokenStrForm(tokenSig); - if(tokenStrForm != null) { // authenticate using delegation tokens via the "DIGEST" mechanism transport = authBridge.createClientTransport(null, store.getHost(), - "DIGEST", tokenStrForm, transport); + "DIGEST", tokenStrForm, transport, + MetaStoreUtils.getMetaStoreSaslProperties(conf)); } else { String principalConfig = conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL); transport = authBridge.createClientTransport( principalConfig, store.getHost(), "KERBEROS", null, - transport); + transport, MetaStoreUtils.getMetaStoreSaslProperties(conf)); } } catch (IOException ioe) { LOG.error("Couldn't create client transport", ioe); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 88151a1..15a2a81 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -64,9 +64,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge; - public class MetaStoreUtils { protected static final Log LOG = LogFactory.getLog("hive.log"); @@ -1264,6 +1264,18 @@ public static boolean compareFieldColumns(List schema1, List getMetaStoreSaslProperties(HiveConf conf) { + // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration + return 
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 1809e1b..5a66a6c 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -20,6 +20,7 @@
 import java.io.IOException;

 import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;

 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -29,8 +30,15 @@
 import org.apache.thrift.TProcessorFactory;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;

 public class HiveAuthFactory {
+  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);

   public static enum AuthTypes {
     NOSASL("NOSASL"),
@@ -71,13 +79,32 @@ public HiveAuthFactory() throws TTransportException {
     }
   }

+  public Map<String, String> getSaslProperties() {
+    Map<String, String> saslProps = new HashMap<String, String>();
+    SaslQOP saslQOP =
+        SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+    // hadoop.rpc.protection being set to a higher level than hive.server2.thrift.sasl.qop
+    // does not make sense in most situations. Log a warning message in such cases.
+    Map<String, String> hadoopSaslProps = ShimLoader.getHadoopThriftAuthBridge().
+        getHadoopSaslProperties(conf);
+    SaslQOP hadoopSaslQOP = SaslQOP.fromString(hadoopSaslProps.get(Sasl.QOP));
+    if (hadoopSaslQOP.ordinal() > saslQOP.ordinal()) {
+      LOG.warn(MessageFormat.format("\"hadoop.rpc.protection\" is set to a higher security level " +
+          "{0} than {1} which is set to {2}", hadoopSaslQOP.toString(),
+          ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname, saslQOP.toString()));
+    }
+    saslProps.put(Sasl.QOP, saslQOP.toString());
+    saslProps.put(Sasl.SERVER_AUTH, "true");
+    return saslProps;
+  }
+
   public TTransportFactory getAuthTransFactory() throws LoginException {
     TTransportFactory transportFactory;
     if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
       try {
-        transportFactory = saslServer.createTransportFactory();
+        transportFactory = saslServer.createTransportFactory(getSaslProperties());
       } catch (TTransportException e) {
         throw new LoginException(e.getMessage());
       }
diff --git a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 379dafb..519556c 100644
--- a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -18,6 +18,7 @@
 package org.apache.hive.service.auth;

 import java.io.IOException;
+import java.util.Map;

 import javax.security.sasl.SaslException;

@@ -56,7 +57,7 @@ public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
   }

   public static TTransport getKerberosTransport(String principal, String host,
-      final TTransport underlyingTransport) throws SaslException {
+      final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
     try {
       final String names[] = principal.split("[/@]");
       if (names.length != 3) {
@@ -67,7 +68,7 @@ public static TTransport getKerberosTransport(String principal, String host,
       HadoopThriftAuthBridge.Client authBridge =
           ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
       return authBridge.createClientTransport(principal, host,
-          "KERBEROS", null, underlyingTransport);
+          "KERBEROS", null, underlyingTransport, saslProps);
     } catch (IOException e) {
       throw new SaslException("Failed to open client transport", e);
     }
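getKerberosTransport now threads the caller's SASL properties down to the Hadoop auth bridge. A sketch of a direct caller (the names below are illustrative; HiveConnection is the real caller in this patch, and the principal must have the three-part user/host@realm form):

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.sasl.Sasl;
    import org.apache.hive.service.auth.KerberosSaslHelper;
    import org.apache.hive.service.auth.SaslQOP;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;

    public class KerberosQopTransportExample {
      public static TTransport open(String principal, String host, int port) throws Exception {
        Map<String, String> saslProps = new HashMap<String, String>();
        saslProps.put(Sasl.QOP, SaslQOP.AUTH_CONF.toString()); // request encryption
        saslProps.put(Sasl.SERVER_AUTH, "true");               // require server auth
        TTransport transport = KerberosSaslHelper.getKerberosTransport(
            principal, host, new TSocket(host, port), saslProps);
        transport.open(); // SASL negotiation happens here
        return transport;
      }
    }
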
diff --git a/service/src/java/org/apache/hive/service/auth/SaslQOP.java b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
new file mode 100644
index 0000000..0b2e7a2
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Possible values of SASL quality-of-protection value.
+ */
+public enum SaslQOP {
+  AUTH("auth"),           // Authentication only.
+  AUTH_INT("auth-int"),   // Authentication and integrity checking by using signatures.
+  AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
+                          // by using signatures and encryption.
+
+  public final String saslQop;
+
+  private static final Map<String, SaslQOP> strToEnum
+      = new HashMap<String, SaslQOP>();
+  static {
+    for (SaslQOP saslQop : values()) {
+      strToEnum.put(saslQop.toString(), saslQop);
+    }
+  }
+
+  private SaslQOP(final String saslQop) {
+    this.saslQop = saslQop;
+  }
+
+  public String toString() {
+    return saslQop;
+  }
+
+  public static SaslQOP fromString(String str) {
+    if (str != null) {
+      str = str.toLowerCase();
+    }
+    SaslQOP saslQOP = strToEnum.get(str);
+    if (saslQOP == null) {
+      throw new IllegalArgumentException("Unknown SASL QOP value: " + str +
+          ". Allowed values are: " + strToEnum.keySet());
+    }
+    return saslQOP;
+  }
+}
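SaslQOP.fromString is case-insensitive, and the declaration order (AUTH < AUTH_INT < AUTH_CONF) is what HiveAuthFactory's ordinal() comparison relies on. For example:

    import org.apache.hive.service.auth.SaslQOP;

    public class SaslQOPDemo {
      public static void main(String[] args) {
        System.out.println(SaslQOP.fromString("AUTH-CONF")); // prints "auth-conf"
        // Ordinal order backs the "is Hadoop configured stronger?" check:
        System.out.println(SaslQOP.AUTH.ordinal() < SaslQOP.AUTH_CONF.ordinal()); // true
        SaslQOP.fromString("bogus"); // throws IllegalArgumentException
      }
    }
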
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index 1df6993..dc89de1 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -24,6 +24,7 @@
 import java.net.Socket;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
+import java.util.Map;

 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -89,6 +90,19 @@ public Server createServer(String keytabFile, String principalConf) throws TTransportException {
     return new Server(keytabFile, principalConf);
   }

+  /**
+   * Read and return Hadoop SASL configuration which can be configured using
+   * "hadoop.rpc.protection"
+   * @param conf
+   * @return Hadoop SASL configuration
+   */
+  @Override
+  public Map<String, String> getHadoopSaslProperties(Configuration conf) {
+    // Initialize the SaslRpcServer to ensure QOP parameters are read from conf
+    SaslRpcServer.init(conf);
+    return SaslRpcServer.SASL_PROPS;
+  }
+
   public static class Client extends HadoopThriftAuthBridge.Client {
     /**
      * Create a client-side SASL transport that wraps an underlying transport.
@@ -97,13 +111,14 @@ public Server createServer(String keytabFile, String principalConf) throws TTransportException {
      * supported.
      * @param serverPrincipal The Kerberos principal of the target server.
      * @param underlyingTransport The underlying transport mechanism, usually a TSocket.
+     * @param saslProps the sasl properties to create the client with
      */
     @Override
     public TTransport createClientTransport(
         String principalConfig, String host,
-        String methodStr, String tokenStrForm, TTransport underlyingTransport)
-        throws IOException {
+        String methodStr, String tokenStrForm, TTransport underlyingTransport,
+        Map<String, String> saslProps) throws IOException {
       AuthMethod method = AuthMethod.valueOf(AuthMethod.class, methodStr);

       TTransport saslTransport = null;
@@ -115,7 +130,7 @@ public TTransport createClientTransport(
             method.getMechanismName(),
             null,
             null, SaslRpcServer.SASL_DEFAULT_REALM,
-            SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(t),
+            saslProps, new SaslClientCallbackHandler(t),
             underlyingTransport);
         return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());

@@ -132,7 +147,7 @@ public TTransport createClientTransport(
               method.getMechanismName(),
               null,
               names[0], names[1],
-              SaslRpcServer.SASL_PROPS, null,
+              saslProps, null,
               underlyingTransport);
           return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser());
         } catch (SaslException se) {
@@ -140,7 +155,7 @@ public TTransport createClientTransport(
         }

       default:
-        throw new IOException("Unsupported authentication method: " + method);
+        throw new IOException("Unsupported authentication method: " + method);
       }
     }
     private static class SaslClientCallbackHandler implements CallbackHandler {
@@ -271,10 +286,11 @@ protected Server(String keytabFile, String principalConf)
      * can be passed as both the input and output transport factory when
      * instantiating a TThreadPoolServer, for example.
      *
+     * @param saslProps Map of SASL properties
      */
     @Override
-    public TTransportFactory createTransportFactory() throws TTransportException
-    {
+    public TTransportFactory createTransportFactory(Map<String, String> saslProps)
+        throws TTransportException {
       // Parse out the kerberos principal, host, realm.
       String kerberosName = realUgi.getUserName();
       final String names[] = SaslRpcServer.splitKerberosName(kerberosName);
@@ -286,11 +302,11 @@ public TTransportFactory createTransportFactory() throws TTransportException
       transFactory.addServerDefinition(
           AuthMethod.KERBEROS.getMechanismName(),
           names[0], names[1],  // two parts of kerberos principal
-          SaslRpcServer.SASL_PROPS,
+          saslProps,
           new SaslRpcServer.SaslGssCallbackHandler());
       transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
           null, SaslRpcServer.SASL_DEFAULT_REALM,
-          SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(secretManager));
+          saslProps, new SaslDigestCallbackHandler(secretManager));

       return new TUGIAssumingTransportFactory(transFactory, realUgi);
     }
diff --git a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
index 3e850ec..7ac7ebc 100644
--- a/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
+++ b/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
@@ -29,6 +29,7 @@
 import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.List;
+import java.util.Map;

 import junit.framework.TestCase;

@@ -77,13 +78,13 @@ public Server() throws TTransportException {
       super();
     }
     @Override
-    public TTransportFactory createTransportFactory()
+    public TTransportFactory createTransportFactory(Map<String, String> saslProps)
         throws TTransportException {
       TSaslServerTransport.Factory transFactory =
           new TSaslServerTransport.Factory();
       transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
           null, SaslRpcServer.SASL_DEFAULT_REALM,
-          SaslRpcServer.SASL_PROPS,
+          saslProps,
           new SaslDigestCallbackHandler(secretManager));

       return new TUGIAssumingTransportFactory(transFactory, realUgi);
diff --git a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index ab7f5c0..b5f4561 100644
--- a/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.net.InetAddress;
+import java.util.Map;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.thrift.TProcessor;
@@ -50,6 +51,18 @@ public Server createServer(String keytabFile, String principalConf)
   }

+  /**
+   * Read and return Hadoop SASL configuration which can be configured using
+   * "hadoop.rpc.protection"
+   *
+   * @param conf
+   * @return Hadoop SASL configuration
+   */
+  public Map<String, String> getHadoopSaslProperties(Configuration conf) {
+    throw new UnsupportedOperationException(
+        "The current version of Hadoop does not support Authentication");
+  }
+
   public static abstract class Client {
     /**
      *
@@ -65,13 +78,14 @@
      * @throws IOException
      */
     public abstract TTransport createClientTransport(
-        String principalConfig, String host,
-        String methodStr,String tokenStrForm, TTransport underlyingTransport)
-        throws IOException;
+        String principalConfig, String host,
+        String methodStr, String tokenStrForm, TTransport underlyingTransport,
+        Map<String, String> saslProps)
+        throws IOException;
   }
   public static abstract class Server {
-    public abstract TTransportFactory createTransportFactory() throws TTransportException;
+    public abstract TTransportFactory createTransportFactory(Map<String, String> saslProps)
+        throws TTransportException;
     public abstract TProcessor wrapProcessor(TProcessor processor);
     public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
     public abstract InetAddress getRemoteAddress();
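End to end, HiveServer2 builds its secured transport factory from hive.server2.thrift.sasl.qop via HiveAuthFactory.getSaslProperties(), while the metastore builds its own from hadoop.rpc.protection. A sketch of the server-side wiring under this patch (the keytab path and principal are placeholders):

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.sasl.Sasl;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
    import org.apache.thrift.transport.TTransportFactory;

    public class SecureServerWiringSketch {
      public static TTransportFactory build() throws Exception {
        HadoopThriftAuthBridge.Server saslServer = ShimLoader.getHadoopThriftAuthBridge()
            .createServer("/path/to/hive.keytab", "hive/_HOST@EXAMPLE.COM"); // placeholders
        Map<String, String> saslProps = new HashMap<String, String>();
        saslProps.put(Sasl.QOP, "auth-conf"); // integrity + confidentiality
        saslProps.put(Sasl.SERVER_AUTH, "true");
        // Every SASL session negotiated by this factory now honors the QOP.
        return saslServer.createTransportFactory(saslProps);
      }
    }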