diff --git a/build.properties b/build.properties
index 3009bba..bbdca67 100644
--- a/build.properties
+++ b/build.properties
@@ -77,7 +77,7 @@ common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
# full profile
iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
-iterate.hive.full.tests=common,ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
+iterate.hive.full.tests=common,ql,contrib,hbase-handler,hwi,service,jdbc,beeline,metastore,odbc,serde,hcatalog
iterate.hive.full.thrift=ql,service,metastore,serde
iterate.hive.full.protobuf=ql
iterate.hive.full.cpp=odbc
@@ -85,7 +85,7 @@ iterate.hive.full.cpp=odbc
# no hcatalog profile
iterate.hive.nohcat.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
iterate.hive.nohcat.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
-iterate.hive.nohcat.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service
+iterate.hive.nohcat.tests=ql,contrib,hbase-handler,hwi,service,jdbc,beeline,metastore,odbc,serde
iterate.hive.nohcat.thrift=common,ql,service,metastore,serde
iterate.hive.nohcat.protobuf=ql
iterate.hive.nohcat.cpp=odbc
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index abfde42..9187a27 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -759,7 +759,7 @@
// Number of async threads
HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 50),
// Number of seconds HiveServer2 shutdown will wait for async threads to terminate
- HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10),
+ HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10L),
// HiveServer2 auth configuration
@@ -773,6 +773,9 @@
HIVE_SERVER2_ENABLE_DOAS("hive.server2.enable.doAs", true),
HIVE_SERVER2_TABLE_TYPE_MAPPING("hive.server2.table.type.mapping", "CLASSIC"),
HIVE_SERVER2_SESSION_HOOK("hive.server2.session.hook", ""),
+ HIVE_SERVER2_USE_SSL("hive.server2.use.SSL", false),
+ HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", ""),
+ HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", ""),
HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,delete,compile"),
diff --git a/data/files/keystore.jks b/data/files/keystore.jks
new file mode 100644
index 0000000..469d8a5
Binary files /dev/null and b/data/files/keystore.jks differ
diff --git a/data/files/truststore.jks b/data/files/truststore.jks
new file mode 100644
index 0000000..9c5d703
Binary files /dev/null and b/data/files/truststore.jks differ
diff --git a/eclipse-templates/TestJdbcMiniHS2.launchtemplate b/eclipse-templates/TestJdbcMiniHS2.launchtemplate
new file mode 100644
index 0000000..537ba12
--- /dev/null
+++ b/eclipse-templates/TestJdbcMiniHS2.launchtemplate
@@ -0,0 +1,44 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/jdbc/build.xml b/jdbc/build.xml
index d3c0f0c..0c5aa76 100644
--- a/jdbc/build.xml
+++ b/jdbc/build.xml
@@ -33,6 +33,7 @@
+
diff --git a/jdbc/ivy.xml b/jdbc/ivy.xml
index b9d0cea..9866246 100644
--- a/jdbc/ivy.xml
+++ b/jdbc/ivy.xml
@@ -33,6 +33,8 @@
rev="${httpcore.version}"/>
+
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index f155686..b7c09d9 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -24,6 +24,7 @@
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
@@ -46,6 +47,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
@@ -61,7 +63,6 @@
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.THttpClient;
-import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
@@ -78,6 +79,10 @@
private static final String HIVE_AUTH_PASSWD = "password";
private static final String HIVE_ANONYMOUS_USER = "anonymous";
private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
+ private static final String HIVE_USE_SSL = "useSSL";
+ private static final String HIVE_SSL_TRUST_STORE = "sslTrustStore";
+ private static final String HIVE_SSL_TRUST_STORE_PASSWORD = "trustStorePassword";
+
private final String jdbcURI;
private final String host;
private final int port;
@@ -91,8 +96,10 @@
private SQLWarning warningChain = null;
private TSessionHandle sessHandle = null;
private final List supportedProtocols = new LinkedList();
+ private int loginTimeout = 0;
public HiveConnection(String uri, Properties info) throws SQLException {
+ loginTimeout = DriverManager.getLoginTimeout();
jdbcURI = uri;
// parse the connection uri
Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
@@ -177,26 +184,27 @@ private TTransport createHttpTransport() throws SQLException {
}
private TTransport createBinaryTransport() throws SQLException {
- transport = new TSocket(host, port);
// handle secure connection if specified
- if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
- || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
- try {
+ try {
+ if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
+ || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
// If Kerberos
if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
Map saslProps = new HashMap();
SaslQOP saslQOP = SaslQOP.AUTH;
- if(sessConfMap.containsKey(HIVE_AUTH_QOP)) {
+ if (sessConfMap.containsKey(HIVE_AUTH_QOP)) {
try {
saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
} catch (IllegalArgumentException e) {
- throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
+ throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(),
+ "42000", e);
}
}
saslProps.put(Sasl.QOP, saslQOP.toString());
saslProps.put(Sasl.SERVER_AUTH, "true");
transport = KerberosSaslHelper.getKerberosTransport(
- sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
+ sessConfMap.get(HIVE_AUTH_PRINCIPAL), host,
+ HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps);
} else {
String userName = sessConfMap.get(HIVE_AUTH_USER);
if ((userName == null) || userName.isEmpty()) {
@@ -206,12 +214,30 @@ private TTransport createBinaryTransport() throws SQLException {
if ((passwd == null) || passwd.isEmpty()) {
passwd = HIVE_ANONYMOUS_PASSWD;
}
+ String useSslStr = sessConfMap.get(HIVE_USE_SSL);
+ if (useSslStr != null && "true".equalsIgnoreCase(useSslStr)) {
+ String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE);
+ String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD);
+ if (sslTrustStore == null || sslTrustStore.isEmpty()) {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
+ } else {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout,
+ sslTrustStore, sslTrustStorePassword);
+ }
+ } else {
+ transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
+ }
transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
}
- } catch (SaslException e) {
- throw new SQLException("Could not create secure connection to "
- + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+ } else {
+ transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
}
+ } catch (SaslException e) {
+ throw new SQLException("Could not create secure connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+ } catch (TTransportException e) {
+ throw new SQLException("Could not create connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
}
return transport;
}
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
new file mode 100644
index 0000000..74d3241
--- /dev/null
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -0,0 +1,68 @@
+package org.apache.hive.jdbc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+ public class TestJdbcWithMiniHS2 {
+ private static MiniHS2 miniHS2 = null;
+ private static Path dataFilePath;
+
+ private Connection hs2Conn = null;
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ Class.forName(MiniHS2.getJdbcDriverName());
+ HiveConf conf = new HiveConf();
+ miniHS2 = new MiniHS2(conf);
+ String dataFileDir = conf.get("test.data.files").replace('\\', '/')
+ .replace("c:", "");
+ dataFilePath = new Path(dataFileDir, "kv1.txt");
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ miniHS2.start();
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
+ hs2Conn.createStatement().execute("set hive.support.concurrency = false");
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ hs2Conn.close();
+ miniHS2.stop();
+ }
+
+ @Test
+ public void testConnection() throws Exception {
+ String tableName = "testTab1";
+ Statement stmt = hs2Conn.createStatement();
+
+ // create table
+ stmt.execute("DROP TABLE IF EXISTS " + tableName);
+ stmt.execute("CREATE TABLE " + tableName
+ + " (under_col INT COMMENT 'the under column', value STRING) COMMENT ' test table'");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + tableName);
+
+ ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
+ assertTrue(res.next());
+ assertEquals("val_238", res.getString(2));
+ res.close();
+ stmt.close();
+ }
+}
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestSSL.java b/jdbc/src/test/org/apache/hive/jdbc/TestSSL.java
new file mode 100644
index 0000000..508403c
--- /dev/null
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestSSL.java
@@ -0,0 +1,203 @@
+package org.apache.hive.jdbc;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestSSL {
+ private static final String KEY_STORE_NAME = "keystore.jks";
+ private static final String TRUST_STORE_NAME = "truststore.jks";
+ private static final String KEY_STORE_PASSWORD = "HiveJdbc";
+ private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
+ private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
+
+ private MiniHS2 miniHS2 = null;
+ private static HiveConf conf = new HiveConf();
+ private Connection hs2Conn = null;
+ private String dataFileDir = conf.get("test.data.files");
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ Class.forName(MiniHS2.getJdbcDriverName());
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ DriverManager.setLoginTimeout(0);
+ if (!System.getProperty("test.data.files", "").isEmpty()) {
+ dataFileDir = System.getProperty("test.data.files");
+ }
+ dataFileDir = dataFileDir.replace('\\', '/').replace("c:", "");
+ miniHS2 = new MiniHS2(conf);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (hs2Conn != null) {
+ hs2Conn.close();
+ }
+ if (miniHS2 != null && miniHS2.isStarted()) {
+ miniHS2.stop();
+ }
+ System.clearProperty(JAVA_TRUST_STORE_PROP);
+ System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
+ }
+
+ /***
+ * Test SSL client with non-SSL server fails
+ * @throws Exception
+ */
+ @Test
+ public void testInvalidConfig() throws Exception {
+ miniHS2.start();
+ DriverManager.setLoginTimeout(4);
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";useSSL=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+ fail("SSL connection should fail with NON-SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME );
+ System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";useSSL=true",
+ System.getProperty("user.name"), "bar");
+ fail("SSL connection should fail with NON-SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ }
+
+ /***
+ * Test non-SSL client with SSL server fails
+ * @throws Exception
+ */
+ @Test
+ public void testConnectionMismatch() throws Exception {
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, "");
+ miniHS2.start();
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
+ fail("NON SSL connection should fail with SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL()+ ";useSSL=false",
+ System.getProperty("user.name"), "bar");
+ fail("NON SSL connection should fail with SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ }
+
+ /***
+ * Test SSL client connection to SSL server
+ * @throws Exception
+ */
+ @Test
+ public void testSSLConnectionWithURL() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";useSSL=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+
+ hs2Conn.close();
+ }
+
+ /***
+ * Test SSL client connection to SSL server
+ * @throws Exception
+ */
+ @Test
+ public void testSSLConnectionWithProperty() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME );
+ System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";useSSL=true",
+ System.getProperty("user.name"), "bar");
+
+ hs2Conn.close();
+ }
+
+ /**
+ * Start HS2 in SSL mode, open a SSL connection and fetch data
+ * @throws Exception
+ */
+ @Test
+ public void testSSLFetch() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";useSSL=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+
+ String tableName = "sslTab";
+ Statement stmt = hs2Conn.createStatement();
+ Path dataFilePath = new Path(dataFileDir, "kv1.txt");
+
+ stmt.execute("set hive.support.concurrency = false");
+
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName
+ + " (under_col int comment 'the under column', value string)");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + tableName);
+
+ ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
+ int rowCount = 0;
+ while (res.next()) {
+ ++rowCount;
+ assertEquals("val_" + res.getInt(1), res.getString(2));
+ }
+
+ // read result over SSL
+ assertEquals(500, rowCount);
+ }
+
+ private void startSslSever () throws Exception {
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
+ dataFileDir + File.separator + KEY_STORE_NAME);
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
+ KEY_STORE_PASSWORD);
+ miniHS2.start();
+ }
+
+}
diff --git a/jdbc/src/test/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java
new file mode 100644
index 0000000..1d0bf3b
--- /dev/null
+++ b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java
@@ -0,0 +1,111 @@
+package org.apache.hive.jdbc.miniHS2;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+
+/***
+ * Base class for Hive service
+ * AbstarctHiveService.
+ *
+ */
+public abstract class AbstarctHiveService {
+ private HiveConf hiveConf = null;
+ private String hostname;
+ private int port;
+ private boolean startedHiveService = false;
+
+ public AbstarctHiveService(HiveConf hiveConf, String hostname, int port) {
+ this.hiveConf = hiveConf;
+ this.hostname = hostname;
+ this.port = port;
+ }
+
+ /**
+ * Get Hive conf
+ * @return
+ */
+ public HiveConf getHiveConf() {
+ return hiveConf;
+ }
+
+ /**
+ * Get config property
+ * @param propertyKey
+ * @return
+ */
+ public String getConfProperty(String propertyKey) {
+ return hiveConf.get(propertyKey);
+ }
+
+ /**
+ * Set config property
+ * @param propertyKey
+ * @param propertyValue
+ */
+ public void setConfProperty(String propertyKey, String propertyValue) {
+ System.setProperty(propertyKey, propertyValue);
+ hiveConf.set(propertyKey, propertyValue);
+ }
+
+ /**
+ * Retrieve warehouse directory
+ * @return
+ */
+ public Path getWareHouseDir() {
+ return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE));
+ }
+
+ public void setWareHouseDir(String wareHouseURI) {
+ verifyNotStarted();
+ System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI);
+ hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI);
+ }
+
+ /**
+ * Set service host
+ * @param hostName
+ */
+ public void setHost(String hostName) {
+ this.hostname = hostName;
+ }
+
+ // get service host
+ protected String getHost() {
+ return hostname;
+ }
+
+ /**
+ * Set service port #
+ * @param portNum
+ */
+ public void setPort(int portNum) {
+ this.port = portNum;
+ }
+
+ // get service port#
+ protected int getPort() {
+ return port;
+ }
+
+ public boolean isStarted() {
+ return startedHiveService;
+ }
+
+ protected void setStarted(boolean hiveServiceStatus) {
+ this.startedHiveService = hiveServiceStatus;
+ }
+
+ protected void verifyStarted() {
+ if (!isStarted()) {
+ throw new IllegalStateException("HS2 is not running");
+ }
+ }
+
+ protected void verifyNotStarted() {
+ if (isStarted()) {
+ throw new IllegalStateException("HS2 already running");
+ }
+ }
+
+}
diff --git a/jdbc/src/test/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/MiniHS2.java
new file mode 100644
index 0000000..cdaf793
--- /dev/null
+++ b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -0,0 +1,101 @@
+package org.apache.hive.jdbc.miniHS2;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hive.service.Service;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.apache.hive.service.server.HiveServer2;
+
+import com.google.common.io.Files;
+
+public class MiniHS2 extends AbstarctHiveService {
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+ private HiveServer2 hiveServer2 = null;
+ private final File baseDir;
+ private static final AtomicLong hs2Counter = new AtomicLong();
+
+ public MiniHS2(HiveConf hiveConf) throws IOException {
+ super(hiveConf, "localhost", MetaStoreUtils.findFreePort());
+ baseDir = Files.createTempDir();
+ setWareHouseDir("file://" + baseDir.getPath() + File.separator + "warehouse");
+ String metaStoreURL = "jdbc:derby:" + baseDir.getAbsolutePath() + File.separator + "test_metastore-" +
+ hs2Counter.incrementAndGet() + ";create=true";
+
+ System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+ hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getPort());
+ HiveMetaStore.HMSHandler.resetDefaultDBFlag();
+ }
+
+ public void start() throws Exception {
+ hiveServer2 = new HiveServer2();
+ hiveServer2.init(getHiveConf());
+ hiveServer2.start();
+ waitForStartup();
+ setStarted(true);
+ }
+
+ public void stop() {
+ verifyStarted();
+ hiveServer2.stop();
+ setStarted(false);
+ FileUtils.deleteQuietly(baseDir);
+ }
+
+ public CLIServiceClient getServiceClient() {
+ verifyStarted();
+ return getServiceClientInternal();
+ }
+
+ public CLIServiceClient getServiceClientInternal() {
+ for (Service service : hiveServer2.getServices()) {
+ if (service instanceof ThriftBinaryCLIService) {
+ return new ThriftCLIServiceClient((ThriftBinaryCLIService)service);
+ }
+ }
+ throw new IllegalStateException("HS2 not running Thrift service");
+ }
+
+ public String getJdbcURL() {
+ return "jdbc:hive2://" + getHost() + ":" + getPort() + "/default";
+ }
+
+ public static String getJdbcDriverName() {
+ return driverName;
+ }
+
+ private void waitForStartup() throws Exception {
+ int waitTime = 0;
+ long startupTimeout = 1000L * 1000L; // 1000 seconds, compared against waitTime accumulated in millis
+ CLIServiceClient hs2Client = getServiceClientInternal();
+ SessionHandle sessionHandle = null;
+ do {
+ Thread.sleep(500L);
+ waitTime += 500L;
+ if (waitTime > startupTimeout) {
+ throw new TimeoutException("Couldn't access new HiveServer: " + getJdbcURL());
+ }
+ try {
+ sessionHandle = hs2Client.openSession("foo", "bar");
+ } catch (Exception e) {
+ // service not started yet
+ continue;
+ }
+ hs2Client.closeSession(sessionHandle);
+ break;
+ } while (true);
+ }
+
+}
diff --git a/jdbc/src/test/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
new file mode 100644
index 0000000..3cba38e
--- /dev/null
+++ b/jdbc/src/test/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
@@ -0,0 +1,51 @@
+package org.apache.hive.jdbc.miniHS2;
+
+import static org.junit.Assert.assertFalse;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestHiveServer2 {
+
+ private static MiniHS2 miniHS2 = null;
+ private Map confOverlay;
+
+ @BeforeClass
+ public static void beforeTest() throws IOException {
+ miniHS2 = new MiniHS2(new HiveConf());
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ miniHS2.start();
+ confOverlay = new HashMap();
+ }
+
+ @After
+ public void tearDown() {
+ miniHS2.stop();
+ }
+
+ @Test
+ public void testConnection() throws Exception {
+ String tabName = "testTab1";
+ CLIServiceClient serviceClient = miniHS2.getServiceClient();
+ SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
+ serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + tabName, confOverlay);
+ serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tabName + " (id INT)", confOverlay);
+ OperationHandle opHandle = serviceClient.executeStatement(sessHandle, "SHOW TABLES", confOverlay);
+ RowSet rowSet = serviceClient.fetchResults(opHandle);
+ assertFalse(rowSet.getSize() == 0);
+ }
+}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 24b1832..17a4f4e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -277,6 +277,10 @@ public static Integer get() {
return threadLocalId.get();
}
+ public static void resetDefaultDBFlag() {
+ createDefaultDB = false;
+ }
+
public HMSHandler(String name) throws MetaException {
super(name);
hiveConf = new HiveConf(this.getClass());
@@ -4085,7 +4089,6 @@ public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws M
}
-
/**
* Discard a current delegation token.
*
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 5a66a6c..d80649f 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -18,6 +18,12 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;
@@ -28,15 +34,15 @@
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSSLTransportFactory;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-
public class HiveAuthFactory {
private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
@@ -153,4 +159,44 @@ public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
}
}
+ public static TTransport getSocketTransport(String host, int port, int loginTimeout)
+ throws TTransportException {
+ return new TSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout)
+ throws TTransportException {
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout,
+ String trustStorePath, String trustStorePassWord) throws TTransportException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setTrustStore(trustStorePath, trustStorePassWord);
+ params.requireClientAuth(true);
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
+ }
+
+ public static TServerSocket getServerSocket(String hiveHost, int portNum)
+ throws TTransportException {
+ InetSocketAddress serverAddress = null;
+ if (hiveHost != null && !hiveHost.isEmpty()) {
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
+ } else {
+ serverAddress = new InetSocketAddress(portNum);
+ }
+ return new TServerSocket(serverAddress);
+ }
+
+ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
+ String keyStorePath, String keyStorePassWord) throws TTransportException, UnknownHostException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setKeyStore(keyStorePath, keyStorePassWord);
+
+ return TSSLTransportFactory.getServerSocket(portNum, 10000,
+ InetAddress.getByName(hiveHost), params);
+ }
+
}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index 9c8f5c1..fae9dfa 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -64,7 +64,19 @@ public void run() {
minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
- TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
+ TServerSocket serverSocket = null;
+ if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
+ serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum);
+ } else {
+ String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH);
+ if (keyStorePath.isEmpty()) {
+ throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
+ " Not configured for SSL connection");
+ }
+ serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum,
+ keyStorePath, hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD));
+ }
+ TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
.processorFactory(processorFactory)
.transportFactory(transportFactory)
.protocolFactory(new TBinaryProtocol.Factory())
diff --git a/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java b/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 623ebcd..17f4a94 100644
--- a/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ b/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -611,6 +611,10 @@ public Path createDelegationTokenFile(Configuration conf) throws IOException {
@Override
public UserGroupInformation createRemoteUser(String userName, List groupNames) {
+ if (groupNames.isEmpty()) {
+ groupNames = new ArrayList();
+ groupNames.add(userName);
+ }
return new UnixUserGroupInformation(userName, groupNames.toArray(new String[0]));
}