diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index b271d65..e68a5a9 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -28,6 +28,8 @@ import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -52,7 +54,8 @@ public static void beforeTest() throws Exception { String dataFileDir = conf.get("test.data.files").replace('\\', '/') .replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); - miniHS2.start(); + Map confOverlay = new HashMap(); + miniHS2.start(confOverlay); } @Before diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java index d0c4fc2..7b85b97 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java @@ -26,12 +26,13 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.jdbc.miniHS2.MiniHS2; -//import org.apache.hive.service.miniHS2.MiniHS2; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -43,11 +44,17 @@ private static final String KEY_STORE_PASSWORD = "HiveJdbc"; private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore"; private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword"; + private static final String HS2_BINARY_MODE = "binary"; + private 
static final String HS2_HTTP_MODE = "http"; + private static final String HS2_HTTP_ENDPOINT = "cliservice"; + private static final String HS2_BINARY_AUTH_MODE = "NONE"; + private static final String HS2_HTTP_AUTH_MODE = "NOSASL"; private MiniHS2 miniHS2 = null; private static HiveConf conf = new HiveConf(); private Connection hs2Conn = null; private String dataFileDir = conf.get("test.data.files"); + private Map confOverlay; @BeforeClass public static void beforeTest() throws Exception { @@ -62,6 +69,7 @@ public void setUp() throws Exception { } dataFileDir = dataFileDir.replace('\\', '/').replace("c:", ""); miniHS2 = new MiniHS2(conf); + confOverlay = new HashMap(); } @After @@ -82,7 +90,10 @@ public void tearDown() throws Exception { */ @Test public void testInvalidConfig() throws Exception { - miniHS2.start(); + clearSslConfOverlay(confOverlay); + // Test in binary mode + setBinaryConfOverlay(confOverlay); + miniHS2.start(confOverlay); DriverManager.setLoginTimeout(4); try { hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + @@ -104,7 +115,25 @@ public void testInvalidConfig() throws Exception { // expected error assertEquals("08S01", e.getSQLState().trim()); } + miniHS2.stop(); + // Test in http mode with ssl properties specified in url + System.clearProperty(JAVA_TRUST_STORE_PROP); + System.clearProperty(JAVA_TRUST_STORE_PASS_PROP); + setHttpConfOverlay(confOverlay); + miniHS2.start(confOverlay); + try { + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD + + "?hive.server2.transport.mode=" + HS2_HTTP_MODE + + ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, + System.getProperty("user.name"), "bar"); + fail("SSL connection should fail with NON-SSL server"); + } catch (SQLException e) { + // expected error + assertEquals("08S01", e.getSQLState().trim()); + } } /*** @@ -113,9 
+142,11 @@ public void testInvalidConfig() throws Exception { */ @Test public void testConnectionMismatch() throws Exception { - miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true"); - miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, ""); - miniHS2.start(); + setSslConfOverlay(confOverlay); + // Test in binary mode + setBinaryConfOverlay(confOverlay); + miniHS2.start(confOverlay); + // Start HS2 with SSL try { hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar"); fail("NON SSL connection should fail with SSL server"); @@ -132,6 +163,23 @@ public void testConnectionMismatch() throws Exception { // expected error assertEquals("08S01", e.getSQLState().trim()); } + miniHS2.stop(); + + // Test in http mode + setHttpConfOverlay(confOverlay); + miniHS2.start(confOverlay); + try { + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + + ";ssl=false;sslTrustStore=" + dataFileDir + File.separator + + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD + + "?hive.server2.transport.mode=" + HS2_HTTP_MODE + + ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, + System.getProperty("user.name"), "bar"); + fail("NON SSL connection should fail with SSL server"); + } catch (SQLException e) { + // expected error + assertEquals("08S01", e.getSQLState().trim()); + } } @@ -141,14 +189,29 @@ public void testConnectionMismatch() throws Exception { */ @Test public void testSSLConnectionWithURL() throws Exception { + setSslConfOverlay(confOverlay); + // Test in binary mode + setBinaryConfOverlay(confOverlay); // Start HS2 with SSL - startSslSever(); + miniHS2.start(confOverlay); // make SSL connection hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar"); + hs2Conn.close(); + miniHS2.stop(); + // Test in http 
mode + setHttpConfOverlay(confOverlay); + miniHS2.start(confOverlay); + // make SSL connection + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD + + "?hive.server2.transport.mode=" + HS2_HTTP_MODE + + ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, + System.getProperty("user.name"), "bar"); hs2Conn.close(); } @@ -158,15 +221,28 @@ public void testSSLConnectionWithURL() throws Exception { */ @Test public void testSSLConnectionWithProperty() throws Exception { + setSslConfOverlay(confOverlay); + // Test in binary mode + setBinaryConfOverlay(confOverlay); // Start HS2 with SSL - startSslSever(); + miniHS2.start(confOverlay); System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME ); System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD); // make SSL connection hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true", System.getProperty("user.name"), "bar"); + hs2Conn.close(); + miniHS2.stop(); + // Test in http mode + setHttpConfOverlay(confOverlay); + miniHS2.start(confOverlay); + // make SSL connection + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + + ";ssl=true;" + "?hive.server2.transport.mode=" + HS2_HTTP_MODE + + ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, + System.getProperty("user.name"), "bar"); hs2Conn.close(); } @@ -176,46 +252,111 @@ public void testSSLConnectionWithProperty() throws Exception { */ @Test public void testSSLFetch() throws Exception { + setSslConfOverlay(confOverlay); + // Test in binary mode + setBinaryConfOverlay(confOverlay); // Start HS2 with SSL - startSslSever(); + miniHS2.start(confOverlay); + + String tableName = "sslTab"; + Path dataFilePath = new Path(dataFileDir, "kv1.txt"); // make SSL connection hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" + dataFileDir + 
File.separator + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar"); - String tableName = "sslTab"; + // Set up test data + setupTestTableWithData(tableName, dataFilePath, hs2Conn); + Statement stmt = hs2Conn.createStatement(); - Path dataFilePath = new Path(dataFileDir, "kv1.txt"); + ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName); + int rowCount = 0; + while (res.next()) { + ++rowCount; + assertEquals("val_" + res.getInt(1), res.getString(2)); + } + // read result over SSL + assertEquals(500, rowCount); - stmt.execute("set hive.support.concurrency = false"); + hs2Conn.close(); + } - stmt.execute("drop table if exists " + tableName); - stmt.execute("create table " + tableName - + " (under_col int comment 'the under column', value string)"); + /** + * Start HS2 in Http mode with SSL enabled, open a SSL connection and fetch data + * @throws Exception + */ + @Test + public void testSSLFetchHttp() throws Exception { + setSslConfOverlay(confOverlay); + // Test in http mode + setHttpConfOverlay(confOverlay); + miniHS2.start(confOverlay); - // load data - stmt.execute("load data local inpath '" - + dataFilePath.toString() + "' into table " + tableName); + String tableName = "sslTab"; + Path dataFilePath = new Path(dataFileDir, "kv1.txt"); + // make SSL connection + hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + + TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD + + "?hive.server2.transport.mode=" + HS2_HTTP_MODE + + ";hive.server2.thrift.http.path=" + HS2_HTTP_ENDPOINT, + System.getProperty("user.name"), "bar"); + + // Set up test data + setupTestTableWithData(tableName, dataFilePath, hs2Conn); + Statement stmt = hs2Conn.createStatement(); ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName); int rowCount = 0; while (res.next()) { ++rowCount; assertEquals("val_" + res.getInt(1), res.getString(2)); } - // 
read result over SSL assertEquals(500, rowCount); + + hs2Conn.close(); } - private void startSslSever () throws Exception { - miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true"); - miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname, + private void setupTestTableWithData(String tableName, Path dataFilePath, + Connection hs2Conn) throws Exception { + Statement stmt = hs2Conn.createStatement(); + stmt.execute("set hive.support.concurrency = false"); + + stmt.execute("drop table if exists " + tableName); + stmt.execute("create table " + tableName + + " (under_col int comment 'the under column', value string)"); + + // load data + stmt.execute("load data local inpath '" + + dataFilePath.toString() + "' into table " + tableName); + stmt.close(); + } + + private void setSslConfOverlay(Map confOverlay) { + confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true"); + confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname, dataFileDir + File.separator + KEY_STORE_NAME); - miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, + confOverlay.put(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, KEY_STORE_PASSWORD); - miniHS2.start(); } + private void clearSslConfOverlay(Map confOverlay) { + confOverlay.put(ConfVars.HIVE_SERVER2_USE_SSL.varname, "false"); + } + + // Currently http mode works with server in NOSASL auth mode & doesn't support doAs + private void setHttpConfOverlay(Map confOverlay) { + confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_HTTP_MODE); + confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, HS2_HTTP_ENDPOINT); + confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HS2_HTTP_AUTH_MODE); + confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false"); + } + + private void setBinaryConfOverlay(Map confOverlay) { + confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, HS2_BINARY_MODE); + 
confOverlay.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, HS2_BINARY_AUTH_MODE); + confOverlay.put(ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true"); + } } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java deleted file mode 100644 index 5ecd156..0000000 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java +++ /dev/null @@ -1,129 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hive.jdbc.miniHS2; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; - -/*** - * Base class for Hive service - * AbstarctHiveService. 
- * - */ -public abstract class AbstarctHiveService { - private HiveConf hiveConf = null; - private String hostname; - private int port; - private boolean startedHiveService = false; - - public AbstarctHiveService(HiveConf hiveConf, String hostname, int port) { - this.hiveConf = hiveConf; - this.hostname = hostname; - this.port = port; - } - - /** - * Get Hive conf - * @return - */ - public HiveConf getHiveConf() { - return hiveConf; - } - - /** - * Get config property - * @param propertyKey - * @return - */ - public String getConfProperty(String propertyKey) { - return hiveConf.get(propertyKey); - } - - /** - * Set config property - * @param propertyKey - * @param propertyValue - */ - public void setConfProperty(String propertyKey, String propertyValue) { - System.setProperty(propertyKey, propertyValue); - hiveConf.set(propertyKey, propertyValue); - } - - /** - * Retrieve warehouse directory - * @return - */ - public Path getWareHouseDir() { - return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE)); - } - - public void setWareHouseDir(String wareHouseURI) { - verifyNotStarted(); - System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI); - hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI); - } - - /** - * Set service host - * @param hostName - */ - public void setHost(String hostName) { - this.hostname = hostName; - } - - // get service host - protected String getHost() { - return hostname; - } - - /** - * Set service port # - * @param portNum - */ - public void setPort(int portNum) { - this.port = portNum; - } - - // get service port# - protected int getPort() { - return port; - } - - public boolean isStarted() { - return startedHiveService; - } - - protected void setStarted(boolean hiveServiceStatus) { - this.startedHiveService = hiveServiceStatus; - } - - protected void verifyStarted() { - if (!isStarted()) { - throw new IllegalStateException("HS2 is not running"); - } - } - - protected void verifyNotStarted() { - if (isStarted()) { 
- throw new IllegalStateException("HS2 alreadyrunning"); - } - } - -} diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java new file mode 100644 index 0000000..e320434 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java @@ -0,0 +1,144 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.jdbc.miniHS2; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; + +/*** + * Base class for Hive service + * AbstractHiveService. 
+ * + */ +public abstract class AbstractHiveService { + private HiveConf hiveConf = null; + private String hostname; + private int binaryPort; + private int httpPort; + private boolean startedHiveService = false; + + public AbstractHiveService(HiveConf hiveConf, String hostname, int binaryPort, int httpPort) { + this.hiveConf = hiveConf; + this.hostname = hostname; + this.binaryPort = binaryPort; + this.httpPort = httpPort; + } + + /** + * Get Hive conf + * @return + */ + public HiveConf getHiveConf() { + return hiveConf; + } + + /** + * Get config property + * @param propertyKey + * @return + */ + public String getConfProperty(String propertyKey) { + return hiveConf.get(propertyKey); + } + + /** + * Set config property + * @param propertyKey + * @param propertyValue + */ + public void setConfProperty(String propertyKey, String propertyValue) { + System.setProperty(propertyKey, propertyValue); + hiveConf.set(propertyKey, propertyValue); + } + + /** + * Retrieve warehouse directory + * @return + */ + public Path getWareHouseDir() { + return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE)); + } + + public void setWareHouseDir(String wareHouseURI) { + verifyNotStarted(); + System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI); + hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI); + } + + /** + * Set service host + * @param hostName + */ + public void setHost(String hostName) { + this.hostname = hostName; + } + + // get service host + protected String getHost() { + return hostname; + } + + /** + * Set binary service port # + * @param portNum + */ + public void setBinaryPort(int portNum) { + this.binaryPort = portNum; + } + + /** + * Set http service port # + * @param portNum + */ + public void setHttpPort(int portNum) { + this.httpPort = portNum; + } + + // Get binary service port # + protected int getBinaryPort() { + return binaryPort; + } + + // Get http service port # + protected int getHttpPort() { + return httpPort; + } + + public 
boolean isStarted() { + return startedHiveService; + } + + protected void setStarted(boolean hiveServiceStatus) { + this.startedHiveService = hiveServiceStatus; + } + + protected void verifyStarted() { + if (!isStarted()) { + throw new IllegalStateException("HiveServer2 is not running"); + } + } + + protected void verifyNotStarted() { + if (isStarted()) { + throw new IllegalStateException("HiveServer2 already running"); + } + } + +} diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java index a65e678..5f6834d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java @@ -20,6 +20,7 @@ import java.io.File; import java.io.IOException; +import java.util.Map; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; @@ -33,32 +34,40 @@ import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient; +import org.apache.hive.service.cli.thrift.ThriftHttpCLIService; import org.apache.hive.service.server.HiveServer2; import com.google.common.io.Files; -public class MiniHS2 extends AbstarctHiveService { +public class MiniHS2 extends AbstractHiveService { private static final String driverName = "org.apache.hive.jdbc.HiveDriver"; private HiveServer2 hiveServer2 = null; private final File baseDir; private static final AtomicLong hs2Counter = new AtomicLong(); + private static final String HS2_BINARY_MODE = "binary"; + private static final String HS2_HTTP_MODE = "http"; public MiniHS2(HiveConf hiveConf) throws IOException { - super(hiveConf, "localhost", MetaStoreUtils.findFreePort()); + super(hiveConf, "localhost", MetaStoreUtils.findFreePort(), MetaStoreUtils.findFreePort()); baseDir = Files.createTempDir(); 
setWareHouseDir("file://" + baseDir.getPath() + File.separator + "warehouse"); String metaStoreURL = "jdbc:derby:" + baseDir.getAbsolutePath() + File.separator + "test_metastore-" + hs2Counter.incrementAndGet() + ";create=true"; - System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL); hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL); + hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE); hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost()); - hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getPort()); + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort()); + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort()); HiveMetaStore.HMSHandler.resetDefaultDBFlag(); } - public void start() throws Exception { + public void start(Map confOverlay) throws Exception { hiveServer2 = new HiveServer2(); + // Set confOverlay parameters + for (Map.Entry entry : confOverlay.entrySet()) { + setConfProperty(entry.getKey(), entry.getValue()); + } hiveServer2.init(getHiveConf()); hiveServer2.start(); waitForStartup(); @@ -80,14 +89,23 @@ public CLIServiceClient getServiceClient() { public CLIServiceClient getServiceClientInternal() { for (Service service : hiveServer2.getServices()) { if (service instanceof ThriftBinaryCLIService) { - return new ThriftCLIServiceClient((ThriftBinaryCLIService)service); + return new ThriftCLIServiceClient((ThriftBinaryCLIService) service); + } + if (service instanceof ThriftHttpCLIService) { + return new ThriftCLIServiceClient((ThriftHttpCLIService) service); } } - throw new IllegalStateException("HS2 not running Thrift service"); + throw new IllegalStateException("HiveServer2 not running Thrift service"); } public String getJdbcURL() { - return "jdbc:hive2://" + getHost() + ":" + getPort() + "/default"; + String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname); + if(transportMode != null && 
(transportMode.equalsIgnoreCase(HS2_HTTP_MODE))) { + return "jdbc:hive2://" + getHost() + ":" + getHttpPort() + "/default"; + } + else { + return "jdbc:hive2://" + getHost() + ":" + getBinaryPort() + "/default"; + } } public static String getJdbcDriverName() { @@ -103,7 +121,7 @@ private void waitForStartup() throws Exception { Thread.sleep(500L); waitTime += 500L; if (waitTime > startupTimeout) { - throw new TimeoutException("Couldn't access new HiveServer: " + getJdbcURL()); + throw new TimeoutException("Couldn't access new HiveServer2: " + getJdbcURL()); } try { sessionHandle = hs2Client.openSession("foo", "bar"); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java index 910de9b..7b8f356 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java @@ -47,12 +47,12 @@ public static void beforeTest() throws IOException { @Before public void setUp() throws Exception { - miniHS2.start(); confOverlay = new HashMap(); + miniHS2.start(confOverlay); } @After - public void tearDown() { + public void tearDown() throws Exception { miniHS2.stop(); } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java index 65177dd..57fda94 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java @@ -35,11 +35,11 @@ import org.junit.Test; /** -* -* TestThriftHttpCLIService. -* This tests ThriftCLIService started in http mode. -* -*/ + * + * TestThriftHttpCLIService. + * This tests ThriftCLIService started in http mode. 
+ * + */ public class TestThriftHttpCLIService extends ThriftCLIServiceTest { diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 597fa1e..371f600 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -18,6 +18,8 @@ package org.apache.hive.jdbc; +import java.io.FileInputStream; +import java.security.KeyStore; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; @@ -44,9 +46,12 @@ import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLContext; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.service.auth.HiveAuthFactory; @@ -60,10 +65,13 @@ import org.apache.hive.service.cli.thrift.TOpenSessionResp; import org.apache.hive.service.cli.thrift.TProtocolVersion; import org.apache.hive.service.cli.thrift.TSessionHandle; -import org.apache.http.impl.client.DefaultHttpClient; +import org.apache.http.HttpRequestInterceptor; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.SSLContexts; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; -import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; @@ -73,6 +81,7 @@ * */ public class HiveConnection implements java.sql.Connection { + public static final Log LOG = LogFactory.getLog(HiveConnection.class.getName()); private static final String 
HIVE_AUTH_TYPE= "auth"; private static final String HIVE_AUTH_QOP = "sasl.qop"; private static final String HIVE_AUTH_SIMPLE = "noSasl"; @@ -84,6 +93,9 @@ private static final String HIVE_USE_SSL = "ssl"; private static final String HIVE_SSL_TRUST_STORE = "sslTrustStore"; private static final String HIVE_SSL_TRUST_STORE_PASSWORD = "trustStorePassword"; + // Currently supports JKS keystore format + // See HIVE-6286 (Add support for PKCS12 keystore format) + private static final String HIVE_SSL_TRUST_STORE_TYPE = "JKS"; private final String jdbcURI; private final String host; @@ -153,7 +165,7 @@ private void openTransport() throws SQLException { transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport(); try { if (!transport.isOpen()) { - transport.open(); + transport.open(); } } catch (TTransportException e) { throw new SQLException("Could not open connection to " @@ -162,22 +174,25 @@ private void openTransport() throws SQLException { } private TTransport createHttpTransport() throws SQLException { + CloseableHttpClient httpClient; // http path should begin with "/" String httpPath; - httpPath = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname); + httpPath = hiveConfMap.get( + HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname); if(httpPath == null) { httpPath = "/"; } if(!httpPath.startsWith("/")) { httpPath = "/" + httpPath; } - - DefaultHttpClient httpClient = new DefaultHttpClient(); - String httpUrl = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname) + - "://" + host + ":" + port + httpPath; - httpClient.addRequestInterceptor( - new HttpBasicAuthInterceptor(getUserName(), getPasswd()) - ); + boolean useSsl = "true".equalsIgnoreCase(sessConfMap.get(HIVE_USE_SSL)); + // Create an http client from the configs + httpClient = getHttpClient(useSsl); + + // Create the http/https url + // JDBC driver will set up an https url if ssl is enabled, otherwise http + String schemeName = useSsl ? 
"https" : "http"; + String httpUrl = schemeName + "://" + host + ":" + port + httpPath; try { transport = new THttpClient(httpUrl, httpClient); } @@ -189,6 +204,45 @@ return transport; } + private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException { + // Add an interceptor to pass username/password in the header + // for basic preemptive http authentication at the server + // In https mode, the entire information is encrypted + HttpRequestInterceptor authInterceptor = new HttpBasicAuthInterceptor( + getUserName(), getPasswd()); + if (useSsl) { + String sslTrustStorePath = sessConfMap.get(HIVE_SSL_TRUST_STORE); + String sslTrustStorePassword = sessConfMap.get( + HIVE_SSL_TRUST_STORE_PASSWORD); + KeyStore sslTrustStore; + SSLContext sslContext; + if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) { + // Create a default client context based on standard JSSE trust material + sslContext = SSLContexts.createDefault(); + } else { + // Pick trust store config from the given path + try { + sslTrustStore = KeyStore.getInstance(HIVE_SSL_TRUST_STORE_TYPE); + sslTrustStore.load(new FileInputStream(sslTrustStorePath), + sslTrustStorePassword.toCharArray()); + sslContext = SSLContexts.custom().loadTrustMaterial( + sslTrustStore).build(); + } + catch (Exception e) { + String msg = "Could not create an https connection to " + + jdbcURI + ". 
" + e.getMessage(); + throw new SQLException(msg, " 08S01", e); + } + } + return HttpClients.custom().setHostnameVerifier(SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER).setSslcontext( + sslContext).addInterceptorFirst(authInterceptor).build(); + } + else { + // Create a plain http client + return HttpClients.custom().addInterceptorFirst(authInterceptor).build(); + } + } + private TTransport createBinaryTransport() throws SQLException { try { // handle secure connection if specified @@ -201,8 +255,8 @@ private TTransport createBinaryTransport() throws SQLException { try { saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP)); } catch (IllegalArgumentException e) { - throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), - "42000", e); + throw new SQLException("Invalid " + HIVE_AUTH_QOP + + " parameter. " + e.getMessage(), "42000", e); } } saslProps.put(Sasl.QOP, saslQOP.toString()); @@ -251,8 +305,7 @@ private TTransport createBinaryTransport() throws SQLException { private boolean isHttpTransportMode() { String transportMode = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname); - if(transportMode != null && (transportMode.equalsIgnoreCase("http") || - transportMode.equalsIgnoreCase("https"))) { + if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) { return true; } return false; @@ -275,7 +328,7 @@ private void openSession() throws SQLException { protocol = openResp.getServerProtocolVersion(); sessHandle = openResp.getSessionHandle(); } catch (TException e) { - e.printStackTrace(); + LOG.error("Error opening session", e); throw new SQLException("Could not establish connection to " + jdbcURI + ": " + e.getMessage(), " 08S01", e); } @@ -357,6 +410,7 @@ public void abort(Executor executor) throws SQLException { * @see java.sql.Connection#clearWarnings() */ + @Override public void clearWarnings() throws SQLException { warningChain = null; } @@ -367,6 +421,7 @@ public void clearWarnings() 
throws SQLException { * @see java.sql.Connection#close() */ + @Override public void close() throws SQLException { if (!isClosed) { TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle); @@ -389,6 +444,7 @@ public void close() throws SQLException { * @see java.sql.Connection#commit() */ + @Override public void commit() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -401,6 +457,7 @@ public void commit() throws SQLException { * java.lang.Object[]) */ + @Override public Array createArrayOf(String arg0, Object[] arg1) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -412,6 +469,7 @@ public Array createArrayOf(String arg0, Object[] arg1) throws SQLException { * @see java.sql.Connection#createBlob() */ + @Override public Blob createBlob() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -423,6 +481,7 @@ public Blob createBlob() throws SQLException { * @see java.sql.Connection#createClob() */ + @Override public Clob createClob() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -434,6 +493,7 @@ public Clob createClob() throws SQLException { * @see java.sql.Connection#createNClob() */ + @Override public NClob createNClob() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -445,6 +505,7 @@ public NClob createNClob() throws SQLException { * @see java.sql.Connection#createSQLXML() */ + @Override public SQLXML createSQLXML() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -458,6 +519,7 @@ public SQLXML createSQLXML() throws SQLException { * @see java.sql.Connection#createStatement() */ + @Override public Statement createStatement() throws SQLException { if (isClosed) { throw new SQLException("Can't create Statement, 
connection is closed"); @@ -471,6 +533,7 @@ public Statement createStatement() throws SQLException { * @see java.sql.Connection#createStatement(int, int) */ + @Override public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { @@ -491,6 +554,7 @@ public Statement createStatement(int resultSetType, int resultSetConcurrency) * @see java.sql.Connection#createStatement(int, int, int) */ + @Override public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { // TODO Auto-generated method stub @@ -503,6 +567,7 @@ public Statement createStatement(int resultSetType, int resultSetConcurrency, * @see java.sql.Connection#createStruct(java.lang.String, java.lang.Object[]) */ + @Override public Struct createStruct(String typeName, Object[] attributes) throws SQLException { // TODO Auto-generated method stub @@ -515,6 +580,7 @@ public Struct createStruct(String typeName, Object[] attributes) * @see java.sql.Connection#getAutoCommit() */ + @Override public boolean getAutoCommit() throws SQLException { return true; } @@ -525,6 +591,7 @@ public boolean getAutoCommit() throws SQLException { * @see java.sql.Connection#getCatalog() */ + @Override public String getCatalog() throws SQLException { return ""; } @@ -535,6 +602,7 @@ public String getCatalog() throws SQLException { * @see java.sql.Connection#getClientInfo() */ + @Override public Properties getClientInfo() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -546,6 +614,7 @@ public Properties getClientInfo() throws SQLException { * @see java.sql.Connection#getClientInfo(java.lang.String) */ + @Override public String getClientInfo(String name) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -557,6 +626,7 @@ public String getClientInfo(String name) 
throws SQLException { * @see java.sql.Connection#getHoldability() */ + @Override public int getHoldability() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -568,6 +638,7 @@ public int getHoldability() throws SQLException { * @see java.sql.Connection#getMetaData() */ + @Override public DatabaseMetaData getMetaData() throws SQLException { if (isClosed) { throw new SQLException("Connection is closed"); @@ -592,6 +663,7 @@ public String getSchema() throws SQLException { * @see java.sql.Connection#getTransactionIsolation() */ + @Override public int getTransactionIsolation() throws SQLException { return Connection.TRANSACTION_NONE; } @@ -602,6 +674,7 @@ public int getTransactionIsolation() throws SQLException { * @see java.sql.Connection#getTypeMap() */ + @Override public Map<String, Class<?>> getTypeMap() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -613,6 +686,7 @@ public int getTransactionIsolation() throws SQLException { * @see java.sql.Connection#getWarnings() */ + @Override public SQLWarning getWarnings() throws SQLException { return warningChain; } @@ -623,6 +697,7 @@ public SQLWarning getWarnings() throws SQLException { * @see java.sql.Connection#isClosed() */ + @Override public boolean isClosed() throws SQLException { return isClosed; } @@ -633,6 +708,7 @@ public boolean isClosed() throws SQLException { * @see java.sql.Connection#isReadOnly() */ + @Override public boolean isReadOnly() throws SQLException { return false; } @@ -643,6 +719,7 @@ public boolean isReadOnly() throws SQLException { * @see java.sql.Connection#isValid(int) */ + @Override public boolean isValid(int timeout) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -654,6 +731,7 @@ public boolean isValid(int timeout) throws SQLException { * @see java.sql.Connection#nativeSQL(java.lang.String) */ + @Override public String 
nativeSQL(String sql) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -665,6 +743,7 @@ public String nativeSQL(String sql) throws SQLException { * @see java.sql.Connection#prepareCall(java.lang.String) */ + @Override public CallableStatement prepareCall(String sql) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -676,6 +755,7 @@ public CallableStatement prepareCall(String sql) throws SQLException { * @see java.sql.Connection#prepareCall(java.lang.String, int, int) */ + @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { // TODO Auto-generated method stub @@ -688,6 +768,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, * @see java.sql.Connection#prepareCall(java.lang.String, int, int, int) */ + @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { // TODO Auto-generated method stub @@ -700,6 +781,7 @@ public CallableStatement prepareCall(String sql, int resultSetType, * @see java.sql.Connection#prepareStatement(java.lang.String) */ + @Override public PreparedStatement prepareStatement(String sql) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); } @@ -710,6 +792,7 @@ public PreparedStatement prepareStatement(String sql) throws SQLException { * @see java.sql.Connection#prepareStatement(java.lang.String, int) */ + @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); @@ -721,6 +804,7 @@ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) * @see java.sql.Connection#prepareStatement(java.lang.String, int[]) */ + @Override public PreparedStatement prepareStatement(String sql, int[] 
columnIndexes) throws SQLException { // TODO Auto-generated method stub @@ -734,6 +818,7 @@ public PreparedStatement prepareStatement(String sql, int[] columnIndexes) * java.lang.String[]) */ + @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { // TODO Auto-generated method stub @@ -746,6 +831,7 @@ public PreparedStatement prepareStatement(String sql, String[] columnNames) * @see java.sql.Connection#prepareStatement(java.lang.String, int, int) */ + @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); @@ -757,6 +843,7 @@ public PreparedStatement prepareStatement(String sql, int resultSetType, * @see java.sql.Connection#prepareStatement(java.lang.String, int, int, int) */ + @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { // TODO Auto-generated method stub @@ -769,6 +856,7 @@ public PreparedStatement prepareStatement(String sql, int resultSetType, * @see java.sql.Connection#releaseSavepoint(java.sql.Savepoint) */ + @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -780,6 +868,7 @@ public void releaseSavepoint(Savepoint savepoint) throws SQLException { * @see java.sql.Connection#rollback() */ + @Override public void rollback() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -791,6 +880,7 @@ public void rollback() throws SQLException { * @see java.sql.Connection#rollback(java.sql.Savepoint) */ + @Override public void rollback(Savepoint savepoint) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -802,6 +892,7 @@ public void 
rollback(Savepoint savepoint) throws SQLException { * @see java.sql.Connection#setAutoCommit(boolean) */ + @Override public void setAutoCommit(boolean autoCommit) throws SQLException { if (autoCommit) { throw new SQLException("enabling autocommit is not supported"); @@ -814,6 +905,7 @@ public void setAutoCommit(boolean autoCommit) throws SQLException { * @see java.sql.Connection#setCatalog(java.lang.String) */ + @Override public void setCatalog(String catalog) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -825,6 +917,7 @@ public void setCatalog(String catalog) throws SQLException { * @see java.sql.Connection#setClientInfo(java.util.Properties) */ + @Override public void setClientInfo(Properties properties) throws SQLClientInfoException { // TODO Auto-generated method stub @@ -837,6 +930,7 @@ public void setClientInfo(Properties properties) * @see java.sql.Connection#setClientInfo(java.lang.String, java.lang.String) */ + @Override public void setClientInfo(String name, String value) throws SQLClientInfoException { // TODO Auto-generated method stub @@ -849,6 +943,7 @@ public void setClientInfo(String name, String value) * @see java.sql.Connection#setHoldability(int) */ + @Override public void setHoldability(int holdability) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -865,6 +960,7 @@ public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLExc * @see java.sql.Connection#setReadOnly(boolean) */ + @Override public void setReadOnly(boolean readOnly) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -876,6 +972,7 @@ public void setReadOnly(boolean readOnly) throws SQLException { * @see java.sql.Connection#setSavepoint() */ + @Override public Savepoint setSavepoint() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); 
@@ -887,6 +984,7 @@ public Savepoint setSavepoint() throws SQLException { * @see java.sql.Connection#setSavepoint(java.lang.String) */ + @Override public Savepoint setSavepoint(String name) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -903,6 +1001,7 @@ public void setSchema(String schema) throws SQLException { * @see java.sql.Connection#setTransactionIsolation(int) */ + @Override public void setTransactionIsolation(int level) throws SQLException { // TODO: throw an exception? } @@ -913,6 +1012,7 @@ public void setTransactionIsolation(int level) throws SQLException { * @see java.sql.Connection#setTypeMap(java.util.Map) */ + @Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -924,6 +1024,7 @@ public void setTypeMap(Map<String, Class<?>> map) throws SQLException { * @see java.sql.Wrapper#isWrapperFor(java.lang.Class) */ + @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -935,6 +1036,7 @@ public boolean isWrapperFor(Class<?> iface) throws SQLException { * @see java.sql.Wrapper#unwrap(java.lang.Class) */ + @Override public <T> T unwrap(Class<T> iface) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); diff --git a/pom.xml b/pom.xml index 41f5337..83518ef 100644 --- a/pom.xml +++ b/pom.xml @@ -102,8 +102,8 @@ 0.96.0-hadoop1 0.96.0-hadoop2 - 4.2.5 - 4.2.4 + 4.3.2 + 4.3.1 1.9.2 0.3.2 5.5.1 @@ -111,6 +111,7 @@ 6.1.26 7.6.0.v20120127 + 7.6.0.v20120127 1.14 0.9.94 1.1 diff --git a/service/pom.xml b/service/pom.xml index dff3174..b1002e2 100644 --- a/service/pom.xml +++ b/service/pom.xml @@ -67,14 +67,9 @@ ${commons-logging.version} - org.mortbay.jetty - jetty - ${jetty.version} - - - org.mortbay.jetty - jetty-util - ${jetty.version} + org.eclipse.jetty.aggregate + jetty-all + 
${jetty.hive-service.version} org.apache.thrift diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index b5a6138..26bda5a 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -59,7 +59,7 @@ protected int portNum; protected InetSocketAddress serverAddress; protected TServer server; - protected org.mortbay.jetty.Server httpServer; + protected org.eclipse.jetty.server.Server httpServer; private boolean isStarted = false; protected boolean isEmbedded = false; diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index e487a7f..a6ff6ce 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -26,10 +26,12 @@ import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.server.TServlet; -import org.mortbay.jetty.nio.SelectChannelConnector; -import org.mortbay.jetty.servlet.Context; -import org.mortbay.jetty.servlet.ServletHolder; -import org.mortbay.thread.QueuedThreadPool; +import org.eclipse.jetty.server.nio.SelectChannelConnector; +import org.eclipse.jetty.server.ssl.SslSelectChannelConnector; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.eclipse.jetty.util.ssl.SslContextFactory; +import org.eclipse.jetty.util.thread.QueuedThreadPool; public class ThriftHttpCLIService extends ThriftCLIService { @@ -75,15 +77,31 @@ public void run() { } } - httpServer = new org.mortbay.jetty.Server(); - + httpServer = new org.eclipse.jetty.server.Server(); QueuedThreadPool threadPool = new 
QueuedThreadPool(); threadPool.setMinThreads(minWorkerThreads); threadPool.setMaxThreads(maxWorkerThreads); httpServer.setThreadPool(threadPool); - SelectChannelConnector connector = new SelectChannelConnector(); - connector.setPort(portNum); + SelectChannelConnector connector; + Boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL); + String schemeName = useSsl ? "https" : "http"; + + if (useSsl) { + String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim(); + String keyStorePassword = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD); + if (keyStorePath.isEmpty()) { + throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + + " Not configured for SSL connection"); + } + SslContextFactory sslContextFactory = new SslContextFactory(); + sslContextFactory.setKeyStorePath(keyStorePath); + sslContextFactory.setKeyStorePassword(keyStorePassword); + connector = new SslSelectChannelConnector(sslContextFactory); + } else { + connector = new SelectChannelConnector(); + } + connector.setPort(portNum); // Linux:yes, Windows:no connector.setReuseAddress(!Shell.WINDOWS); httpServer.addConnector(connector); @@ -93,12 +111,15 @@ public void run() { TProtocolFactory protocolFactory = new TBinaryProtocol.Factory(); TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory); - final Context context = new Context(httpServer, "/", Context.SESSIONS); + + final ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); + context.setContextPath("/"); + httpServer.setHandler(context); context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc. 
httpServer.start(); - String msg = "Starting CLIService in Http mode on port " + portNum + + String msg = "Started ThriftHttpCLIService in " + schemeName + " mode on port " + portNum + " path=" + httpPath + " with " + minWorkerThreads + ".." + maxWorkerThreads + " worker threads"; LOG.info(msg); diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index fa13783..8700c5b 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -54,8 +54,7 @@ public synchronized void init(HiveConf hiveConf) { if(transportMode == null) { transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE); } - if(transportMode != null && (transportMode.equalsIgnoreCase("http") || - transportMode.equalsIgnoreCase("https"))) { + if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) { thriftCLIService = new ThriftHttpCLIService(cliService); } else {