diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
index 0f0ae54..086c383 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/JdbcWithMiniKdcSQLAuthTest.java
@@ -59,7 +59,7 @@ public static void beforeTestBase() throws Exception {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).build();
     miniHS2.start(new HashMap());
   }
 
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
index bbec37e..ad96cbd 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
@@ -170,7 +170,7 @@ public String getDefaultUserPrincipal() {
    * @return new MiniHS2 instance
    * @throws Exception
    */
-  public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf) throws Exception {
+  public static MiniHS2.Builder getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf) throws Exception {
     return getMiniHS2WithKerb(miniHiveKdc, hiveConf, AUTHENTICATION_TYPE);
   }
 
@@ -182,7 +182,7 @@ public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveC
    * @return new MiniHS2 instance
    * @throws Exception
    */
-  public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf,
+  public static MiniHS2.Builder getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveConf,
       String authType) throws Exception {
     String hivePrincipal =
         miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
@@ -190,7 +190,7 @@ public static MiniHS2 getMiniHS2WithKerb(MiniHiveKdc miniHiveKdc, HiveConf hiveC
         miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
     return new MiniHS2.Builder().withConf(hiveConf).withMiniKdc(hivePrincipal, hiveKeytab).
-        withAuthenticationType(authType).build();
+        withAuthenticationType(authType);
   }
 
   /**
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
index 4fabe47..698aea1 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
@@ -57,7 +57,7 @@ public static void setUpBeforeClass() throws Exception {
     confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "" + Boolean.FALSE);
     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).build();
     miniHS2.start(confOverlay);
   }
 
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
index daf0f7e..1cf6439 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -70,7 +70,7 @@ public static void beforeTest() throws Exception {
     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).build();
     miniHS2.start(confOverlay);
   }
 
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
index 5e70d68..adb505c 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdcCookie.java
@@ -57,7 +57,7 @@ public static void beforeTest() throws Exception {
         1, TimeUnit.SECONDS);
     hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
-    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).build();
     miniHS2.start(new HashMap());
   }
 
@@ -95,7 +95,7 @@ public void testCookie() throws Exception {
     // run a query in a loop so that we hit a 401 occasionally
     for (int i = 0; i < 10; i++) {
-      stmt.execute("select * from " + tableName );
+      stmt.executeQuery("select * from " + tableName ).next();
     }
     stmt.execute("drop table " + tableName);
     stmt.close();
 
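With this change getMiniHS2WithKerb() hands back a MiniHS2.Builder instead of a started MiniHS2, so existing callers simply append .build(), while tests that need extra options can chain them before building. A minimal sketch of the two call patterns, both taken directly from the tests in this patch:

    // Plain Kerberos setup, as in the callers updated above.
    MiniHS2 miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).build();

    // Customize the builder first, e.g. to run against a remote metastore (see TestSSLWithMiniKdc below).
    MiniHS2 sslMiniHS2 =
        MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).withRemoteMetastore().build();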
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
new file mode 100644
index 0000000..ad82649
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestSSLWithMiniKdc.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.hadoop.hive.jdbc.SSLTestUtils;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+public class TestSSLWithMiniKdc {
+
+  private static MiniHS2 miniHS2 = null;
+  private static MiniHiveKdc miniHiveKdc = null;
+  private static HiveConf hiveConf = new HiveConf();
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    Class.forName(MiniHS2.getJdbcDriverName());
+
+    SSLTestUtils.setMetastoreSslConf(hiveConf);
+    hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf).withRemoteMetastore().build();
+
+    Map confOverlay = new HashMap<>();
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
+
+    miniHS2.start(confOverlay);
+  }
+
+  @AfterClass
+  public static void afterTest() throws Exception {
+    miniHS2.stop();
+  }
+
+  @Test
+  public void testConnection() throws Exception {
+    String tableName = "testTable";
+    Path dataFilePath = new Path(hiveConf.get("test.data.files"), "kv1.txt");
+    Connection hs2Conn = getConnection(MiniHiveKdc.HIVE_TEST_USER_1);
+
+    Statement stmt = hs2Conn.createStatement();
+
+    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+
+    stmt.execute("select * from " + tableName);
+    stmt.execute("drop table " + tableName);
+    stmt.close();
+  }
+
+  private Connection getConnection(String userName) throws Exception {
+    miniHiveKdc.loginUser(userName);
+    return DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
+        System.getProperty("user.name"), "bar");
+  }
+}
diff --git a/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java b/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java
new file mode 100644
index 0000000..a739379
--- /dev/null
+++ b/itests/hive-unit/src/main/java/org/hadoop/hive/jdbc/SSLTestUtils.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.hadoop.hive.jdbc;
+
+import java.io.File;
+import java.net.URLEncoder;
+import java.sql.Connection;
+import java.sql.Statement;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+
+public class SSLTestUtils {
+
+  private static final String LOCALHOST_KEY_STORE_NAME = "keystore.jks";
+  private static final String TRUST_STORE_NAME = "truststore.jks";
+  private static final String KEY_STORE_TRUST_STORE_PASSWORD = "HiveJdbc";
+  private static final String HS2_BINARY_MODE = "binary";
+  private static final String HS2_HTTP_MODE = "http";
+  private static final String HS2_HTTP_ENDPOINT = "cliservice";
+  private static final String HS2_BINARY_AUTH_MODE = "NONE";
+
+  private static final HiveConf conf = new HiveConf();
+  private static final String dataFileDir = !System.getProperty("test.data.files", "").isEmpty() ? System.getProperty(
+      "test.data.files") : conf.get("test.data.files").replace('\\', '/').replace("c:", "");
+
+  public static final String SSL_CONN_PARAMS = "ssl=true;sslTrustStore="
+      + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME) + ";trustStorePassword="
+      + KEY_STORE_TRUST_STORE_PASSWORD;
+
+  public static void setSslConfOverlay(Map confOverlay) {
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
+        dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME);
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+  }
+
+  public static void setMetastoreSslConf(HiveConf conf) {
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_METASTORE_USE_SSL, true);
+    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH,
+        dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME);
+    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH,
+        dataFileDir + File.separator + TRUST_STORE_NAME);
+    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD,
+        KEY_STORE_TRUST_STORE_PASSWORD);
+  }
+
+  public static void clearSslConfOverlay(Map confOverlay) {
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_USE_SSL.varname, "false");
+  }
+
+  public static void setHttpConfOverlay(Map confOverlay) {
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE);
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT);
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
+  }
+
+  public static void setBinaryConfOverlay(Map confOverlay) {
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_BINARY_MODE);
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HS2_BINARY_AUTH_MODE);
+    confOverlay.put(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
+  }
+
+  public static void setupTestTableWithData(String tableName, Path dataFilePath,
+      Connection hs2Conn) throws Exception {
+    Statement stmt = hs2Conn.createStatement();
+    stmt.execute("set hive.support.concurrency = false");
+
+    stmt.execute("drop table if exists " + tableName);
+    stmt.execute("create table " + tableName
+        + " (under_col int comment 'the under column', value string)");
+
+    // load data
+    stmt.execute("load data local inpath '"
+        + dataFilePath.toString() + "' into table " + tableName);
+    stmt.close();
+  }
+}
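SSLTestUtils centralizes the SSL helpers that TestSSL previously kept as private methods, so TestSSL and the new TestSSLWithMiniKdc can share them. A short usage sketch, mirroring how the tests in this patch drive MiniHS2 (nothing beyond the helpers defined above is assumed):

    // Server side: build a conf overlay for MiniHS2.
    Map<String, String> confOverlay = new HashMap<>();
    SSLTestUtils.setSslConfOverlay(confOverlay);   // HIVE_SERVER2_USE_SSL plus the test keystore
    SSLTestUtils.setHttpConfOverlay(confOverlay);  // or setBinaryConfOverlay() for the TCP transport
    miniHS2.start(confOverlay);

    // Metastore side: switch the HiveConf handed to a remote metastore to SSL as well.
    SSLTestUtils.setMetastoreSslConf(hiveConf);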
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
index 0a53259..32da0c89 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.hadoop.hive.jdbc.SSLTestUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -55,19 +56,12 @@
   private static final String KEY_STORE_TRUST_STORE_PASSWORD = "HiveJdbc";
   private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
   private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
-  private static final String HS2_BINARY_MODE = "binary";
-  private static final String HS2_HTTP_MODE = "http";
-  private static final String HS2_HTTP_ENDPOINT = "cliservice";
-  private static final String HS2_BINARY_AUTH_MODE = "NONE";
   private MiniHS2 miniHS2 = null;
   private static HiveConf conf = new HiveConf();
   private Connection hs2Conn = null;
   private String dataFileDir = conf.get("test.data.files");
   private Map confOverlay;
-  private final String SSL_CONN_PARAMS = "ssl=true;sslTrustStore="
-      + URLEncoder.encode(dataFileDir + File.separator + TRUST_STORE_NAME) + ";trustStorePassword="
-      + KEY_STORE_TRUST_STORE_PASSWORD;
 
   @BeforeClass
   public static void beforeTest() throws Exception {
@@ -83,10 +77,6 @@ public static void afterClass() throws Exception {
   @Before
   public void setUp() throws Exception {
     DriverManager.setLoginTimeout(0);
-    if (!System.getProperty("test.data.files", "").isEmpty()) {
-      dataFileDir = System.getProperty("test.data.files");
-    }
-    dataFileDir = dataFileDir.replace('\\', '/').replace("c:", "");
     miniHS2 = new MiniHS2.Builder().withConf(conf).cleanupLocalDirOnStartup(false).build();
     confOverlay = new HashMap();
   }
@@ -127,9 +117,9 @@ public void testSSLVersion() throws Exception {
     // we depend on linux openssl exit codes
     Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("linux"));
 
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
 
@@ -146,7 +136,7 @@ public void testSSLVersion() throws Exception {
     miniHS2.stop();
 
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
     try {
@@ -175,13 +165,13 @@
    */
   @Test
  public void testInvalidConfig() throws Exception {
-    clearSslConfOverlay(confOverlay);
+    SSLTestUtils.clearSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     DriverManager.setLoginTimeout(4);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
          System.getProperty("user.name"), "bar");
      fail("SSL connection should fail with NON-SSL server");
    } catch (SQLException e) {
@@ -204,10 +194,10 @@ public void testInvalidConfig() throws Exception {
     // Test in http mode with ssl properties specified in url
     System.clearProperty(JAVA_TRUST_STORE_PROP);
     System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
-      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+      hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
          System.getProperty("user.name"), "bar");
      fail("SSL connection should fail with NON-SSL server");
    } catch (SQLException e) {
@@ -223,9 +213,9 @@
    */
   @Test
   public void testConnectionMismatch() throws Exception {
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay); // Start HS2 with SSL
     try {
@@ -247,7 +237,7 @@ public void testConnectionMismatch() throws Exception {
     miniHS2.stop();
 
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", ";ssl=false"),
@@ -266,23 +256,23 @@
    */
   @Test
   public void testSSLConnectionWithURL() throws Exception {
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
     hs2Conn.close();
     miniHS2.stop();
 
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -293,9 +283,9 @@ public void testSSLConnectionWithURL() throws Exception {
    */
   @Test
   public void testSSLConnectionWithProperty() throws Exception {
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
@@ -308,10 +298,10 @@ public void testSSLConnectionWithProperty() throws Exception {
     miniHS2.stop();
 
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default",SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
     hs2Conn.close();
   }
@@ -322,9 +312,9 @@ public void testSSLConnectionWithProperty() throws Exception {
    */
   @Test
   public void testSSLFetch() throws Exception {
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in binary mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     // Start HS2 with SSL
     miniHS2.start(confOverlay);
@@ -332,11 +322,11 @@ public void testSSLFetch() throws Exception {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
 
     // Set up test data
-    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
 
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
@@ -357,20 +347,20 @@ public void testSSLFetch() throws Exception {
    */
   @Test
   public void testSSLFetchHttp() throws Exception {
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
 
     String tableName = "sslTab";
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
 
     // Set up test data
-    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
     int rowCount = 0;
@@ -393,16 +383,16 @@ public void testSSLFetchHttp() throws Exception {
   @Test
   public void testConnectionWrongCertCN() throws Exception {
     // This call sets the default ssl params including the correct keystore in the server config
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Replace default keystore with keystore for www.example.com
     confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname, dataFileDir + File.separator + EXAMPLEDOTCOM_KEY_STORE_NAME);
     // Binary (TCP) mode
-    setBinaryConfOverlay(confOverlay);
+    SSLTestUtils.setBinaryConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn =
-          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
              System.getProperty("user.name"), "bar");
      fail("SSL connection, with the server providing wrong certifcate (with CN www.example.com, "
          + "instead of localhost), should fail");
@@ -415,11 +405,11 @@ public void testConnectionWrongCertCN() throws Exception {
     miniHS2.stop();
 
     // Http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2.start(confOverlay);
     try {
       hs2Conn =
-          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+          DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
              System.getProperty("user.name"), "bar");
      fail("SSL connection, with the server providing wrong certifcate (with CN www.example.com, "
          + "instead of localhost), should fail");
@@ -439,10 +429,10 @@ public void testConnectionWrongCertCN() throws Exception {
    */
   @Test
   public void testMetastoreWithSSL() throws Exception {
-    setMetastoreSslConf(conf);
-    setSslConfOverlay(confOverlay);
+    SSLTestUtils.setMetastoreSslConf(conf);
+    SSLTestUtils.setSslConfOverlay(confOverlay);
     // Test in http mode
-    setHttpConfOverlay(confOverlay);
+    SSLTestUtils.setHttpConfOverlay(confOverlay);
     miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build();
     miniHS2.start(confOverlay);
 
@@ -450,11 +440,11 @@ public void testMetastoreWithSSL() throws Exception {
     Path dataFilePath = new Path(dataFileDir, "kv1.txt");
 
     // make SSL connection
-    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSL_CONN_PARAMS),
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS),
        System.getProperty("user.name"), "bar");
 
     // Set up test data
-    setupTestTableWithData(tableName, dataFilePath, hs2Conn);
+    SSLTestUtils.setupTestTableWithData(tableName, dataFilePath, hs2Conn);
     Statement stmt = hs2Conn.createStatement();
     ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
     int rowCount = 0;
@@ -474,7 +464,7 @@ public void testMetastoreWithSSL() throws Exception {
    */
   @Test
   public void testMetastoreConnectionWrongCertCN() throws Exception {
-    setMetastoreSslConf(conf);
+    SSLTestUtils.setMetastoreSslConf(conf);
     conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH,
        dataFileDir + File.separator + EXAMPLEDOTCOM_KEY_STORE_NAME);
     miniHS2 = new MiniHS2.Builder().withRemoteMetastore().withConf(conf).cleanupLocalDirOnStartup(false).build();
@@ -486,55 +476,4 @@ public void testMetastoreConnectionWrongCertCN() throws Exception {
     miniHS2.stop();
   }
-
-  private void setupTestTableWithData(String tableName, Path dataFilePath,
-      Connection hs2Conn) throws Exception {
-    Statement stmt = hs2Conn.createStatement();
-    stmt.execute("set hive.support.concurrency = false");
-
-    stmt.execute("drop table if exists " + tableName);
-    stmt.execute("create table " + tableName
-        + " (under_col int comment 'the under column', value string)");
-
-    // load data
-    stmt.execute("load data local inpath '"
-        + dataFilePath.toString() + "' into table " + tableName);
-    stmt.close();
-  }
-
-  private void setSslConfOverlay(Map confOverlay) {
-    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
-    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
-        dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME);
-    confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
-        KEY_STORE_TRUST_STORE_PASSWORD);
-  }
-
-  private void setMetastoreSslConf(HiveConf conf) {
-    conf.setBoolVar(ConfVars.HIVE_METASTORE_USE_SSL, true);
-    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH,
-        dataFileDir + File.separator + LOCALHOST_KEY_STORE_NAME);
-    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD,
-        KEY_STORE_TRUST_STORE_PASSWORD);
-    conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH,
-        dataFileDir + File.separator + TRUST_STORE_NAME);
-    conf.setVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD,
-        KEY_STORE_TRUST_STORE_PASSWORD);
-  }
-
-  private void clearSslConfOverlay(Map confOverlay) {
-    confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false");
-  }
-
-  private void setHttpConfOverlay(Map confOverlay) {
-    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE);
-    confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT);
-    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
-  }
-
-  private void setBinaryConfOverlay(Map confOverlay) {
-    confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_BINARY_MODE);
-    confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HS2_BINARY_AUTH_MODE);
-    confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
-  }
 }
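For readers following the refactor above: SSLTestUtils.SSL_CONN_PARAMS carries the same trailing URL fragment the old private constant produced (ssl=true;sslTrustStore=...;trustStorePassword=...), so the client-side pattern in these tests is unchanged. Roughly, with the truststore path and password taken from the test data directory as defined in SSLTestUtils:

    String url = miniHS2.getJdbcURL("default", SSLTestUtils.SSL_CONN_PARAMS);
    Connection hs2Conn = DriverManager.getConnection(url, System.getProperty("user.name"), "bar");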
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 80b1e98..1d05fe1 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -7081,10 +7081,9 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
             ServerMode.METASTORE);
         saslServer.setSecretManager(delegationTokenManager.getSecretManager());
         transFactory = saslServer.createTransportFactory(
-            MetaStoreUtils.getMetaStoreSaslProperties(conf));
+            MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
         processor = saslServer.wrapProcessor(
           new ThriftHiveMetastore.Processor(handler));
-        serverSocket = HiveAuthUtils.getServerSocket(null, port);
 
         LOG.info("Starting DB backed MetaStore Server in Secure Mode");
       } else {
@@ -7103,25 +7102,27 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
         processor = new TSetIpAddressProcessor(handler);
         LOG.info("Starting DB backed MetaStore Server");
       }
+    }
+
+    if (!useSSL) {
+      serverSocket = HiveAuthUtils.getServerSocket(null, port);
+    } else {
+      String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
+      if (keyStorePath.isEmpty()) {
+        throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname
+            + " Not configured for SSL connection");
+      }
+      String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
+          HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
 
       // enable SSL support for HMS
       List sslVersionBlacklist = new ArrayList();
       for (String sslVersion : conf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
         sslVersionBlacklist.add(sslVersion);
       }
-      if (!useSSL) {
-        serverSocket = HiveAuthUtils.getServerSocket(null, port);
-      } else {
-        String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
-        if (keyStorePath.isEmpty()) {
-          throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname
-              + " Not configured for SSL connection");
-        }
-        String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
-            HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
-        serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath,
-            keyStorePassword, sslVersionBlacklist);
-      }
+
+      serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath,
+          keyStorePassword, sslVersionBlacklist);
     }
 
     if (tcpKeepAlive) {
@@ -7183,6 +7184,7 @@ public void processContext(ServerContext serverContext, TTransport tTransport, T
       HMSHandler.LOG.info("Options.maxWorkerThreads = "
        + maxWorkerThreads);
       HMSHandler.LOG.info("TCP keepalive = " + tcpKeepAlive);
+      HMSHandler.LOG.info("Enable SSL = " + useSSL);
 
       if (startLock != null) {
         signalOtherThreadsToStart(tServer, startLock, startCondition, startedServing);
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 7002620..dcb14e8 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -392,6 +392,29 @@ private void open() throws MetaException {
       LOG.info("Trying to connect to metastore with URI " + store);
 
       try {
+        if (useSSL) {
+          try {
+            String trustStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH).trim();
+            if (trustStorePath.isEmpty()) {
+              throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH.varname
+                  + " Not configured for SSL connection");
+            }
+            String trustStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
+                HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD.varname);
+
+            // Create an SSL socket and connect
+            transport = HiveAuthUtils.getSSLSocket(store.getHost(), store.getPort(), clientSocketTimeout, trustStorePath, trustStorePassword );
+            LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet());
+          } catch(IOException e) {
+            throw new IllegalArgumentException(e);
+          } catch(TTransportException e) {
+            tte = e;
+            throw new MetaException(e.toString());
+          }
+        } else {
+          transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
+        }
+
         if (useSasl) {
           // Wrap thrift connection with SASL for secure connection.
           try {
@@ -406,48 +429,24 @@ private void open() throws MetaException {
            String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE);
            // tokenSig could be null
            tokenStrForm = Utils.getTokenStrForm(tokenSig);
-           transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
 
            if(tokenStrForm != null) {
              // authenticate using delegation tokens via the "DIGEST" mechanism
              transport = authBridge.createClientTransport(null, store.getHost(),
                  "DIGEST", tokenStrForm, transport,
-                  MetaStoreUtils.getMetaStoreSaslProperties(conf));
+                  MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
            } else {
              String principalConfig =
                  conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL);
              transport = authBridge.createClientTransport(
                  principalConfig, store.getHost(), "KERBEROS", null,
-                  transport, MetaStoreUtils.getMetaStoreSaslProperties(conf));
+                  transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
            }
          } catch (IOException ioe) {
            LOG.error("Couldn't create client transport", ioe);
            throw new MetaException(ioe.toString());
          }
        } else {
-          if (useSSL) {
-            try {
-              String trustStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH).trim();
-              if (trustStorePath.isEmpty()) {
-                throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH.varname
-                    + " Not configured for SSL connection");
-              }
-              String trustStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
-                  HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD.varname);
-
-              // Create an SSL socket and connect
-              transport = HiveAuthUtils.getSSLSocket(store.getHost(), store.getPort(), clientSocketTimeout, trustStorePath, trustStorePassword );
-              LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet());
-            } catch(IOException e) {
-              throw new IllegalArgumentException(e);
-            } catch(TTransportException e) {
-              tte = e;
-              throw new MetaException(e.toString());
-            }
-          } else {
-            transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
-          }
-
          if (useFramedTransport) {
            transport = new TFramedTransport(transport);
          }
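Taken together, the HiveMetaStore and HiveMetaStoreClient hunks above create the SSL (or plain) socket independently of whether SASL/Kerberos is in use, instead of only in the unsecured else-branch as before. A hedged configuration sketch of the HiveConf settings involved; the paths and password below are placeholders and simply mirror SSLTestUtils.setMetastoreSslConf():

    HiveConf conf = new HiveConf();
    // Server side (HMS): listen on an SSL server socket.
    conf.setBoolVar(HiveConf.ConfVars.HIVE_METASTORE_USE_SSL, true);
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH, "/path/to/keystore.jks");      // placeholder
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD, "changeit");               // placeholder
    // Client side (HiveMetaStoreClient): trust the metastore certificate.
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH, "/path/to/truststore.jks");  // placeholder
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD, "changeit");             // placeholder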
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 79f6d7f..1b701e0 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -47,6 +47,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -86,6 +87,7 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.security.SaslRpcServer;
 
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -1761,8 +1763,19 @@ public static boolean compareFieldColumns(List schema1, List
-  public static Map getMetaStoreSaslProperties(HiveConf conf) {
+  public static Map getMetaStoreSaslProperties(HiveConf conf, boolean useSSL) {
     // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration
+
+    // If SSL is enabled, override the given value of "hadoop.rpc.protection" and set it to "authentication"
+    // This disables any encryption provided by SASL, since SSL already provides it
+    String hadoopRpcProtectionVal = conf.get(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION);
+    String hadoopRpcProtectionAuth = SaslRpcServer.QualityOfProtection.AUTHENTICATION.toString();
+
+    if (useSSL && hadoopRpcProtectionVal != null && !hadoopRpcProtectionVal.equals(hadoopRpcProtectionAuth)) {
+      LOG.warn("Overriding value of " + CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION + " setting it from "
+          + hadoopRpcProtectionVal + " to " + hadoopRpcProtectionAuth + " because SSL is enabled");
+      conf.set(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, hadoopRpcProtectionAuth);
+    }
     return ShimLoader.getHadoopThriftAuthBridge().getHadoopSaslProperties(conf);
   }
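The extra useSSL flag on getMetaStoreSaslProperties() exists so that SASL does not negotiate its own encryption on top of TLS: when SSL is enabled, a differing hadoop.rpc.protection value is logged and overridden to "authentication" before the properties reach the thrift auth bridge. A one-line usage sketch of the new signature:

    // With SSL on, integrity/privacy QOP settings are overridden to authentication (see the warning above).
    Map<String, String> saslProps = MetaStoreUtils.getMetaStoreSaslProperties(conf, /* useSSL */ true);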