diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 8cd6563..90f09e1 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -719,6 +719,12 @@
HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
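+    // HTTP mode tunnels the standard Thrift protocol over HTTP POST; the
+    // hive.server2.http.* settings parallel the hive.server2.thrift.* ones below.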
+ HIVE_SERVER2_SERVERMODE("hive.server2.servermode", "thrift"), //thrift or http
+ HIVE_SERVER2_HTTP_PORT("hive.server2.http.port", 10000),
+ HIVE_SERVER2_HTTP_PATH("hive.server2.http.path", ""),
+ HIVE_SERVER2_HTTP_MIN_WORKER_THREADS("hive.server2.http.min.worker.threads", 5),
+ HIVE_SERVER2_HTTP_MAX_WORKER_THREADS("hive.server2.http.max.worker.threads", 100),
+
HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index 5de5965..d337c3f 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -887,7 +887,36 @@
Read from a binary stream and treat each hive.binary.record.max.length bytes as a record.
The last record before the end of stream can have less than hive.binary.record.max.length bytes
  </description>
</property>

+<property>
+  <name>hive.server2.servermode</name>
+  <value>thrift</value>
+  <description>Server endpoint mode: "thrift" or "http".</description>
+</property>
+
+<property>
+  <name>hive.server2.http.port</name>
+  <value>10000</value>
+  <description>Port number when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.path</name>
+  <value>hs2</value>
+  <description>Path component of the URL endpoint when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.min.worker.threads</name>
+  <value>5</value>
+  <description>Minimum number of worker threads when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.max.worker.threads</name>
+  <value>100</value>
+  <description>Maximum number of worker threads when in HTTP mode.</description>
+</property>

<property>
  <name>hive.script.recordreader</name>
@@ -1732,6 +1761,8 @@
+
+
<property>
  <name>hive.hmshandler.retry.attempts</name>
  <value>1</value>
diff --git a/eclipse-templates/.classpath b/eclipse-templates/.classpath
index 81b0d06..1a88e2a 100644
--- a/eclipse-templates/.classpath
+++ b/eclipse-templates/.classpath
@@ -76,6 +76,8 @@
+  <classpathentry kind="lib" path="build/ivy/lib/default/httpclient-@httpclient.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/httpcore-@httpcore.version@.jar"/>
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index ec59925..2753f36 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -45,6 +45,8 @@ commons-pool.version=1.5.4
derby.version=10.4.2.0
guava.version=11.0.2
hbase.version=0.94.6.1
+httpclient.version=4.2.5
+httpcore.version=4.2.4
jackson.version=1.8.8
javaewah.version=0.3.2
jdo-api.version=3.0.1
@@ -69,3 +71,4 @@ velocity.version=1.5
metrics-core.version=2.1.2
zookeeper.version=3.4.3
javolution.version=5.5.1
diff --git a/jdbc/ivy.xml b/jdbc/ivy.xml
index 2bf78a6..b9d0cea 100644
--- a/jdbc/ivy.xml
+++ b/jdbc/ivy.xml
@@ -29,5 +29,10 @@
+    <dependency org="org.apache.httpcomponents" name="httpclient"
+        rev="${httpclient.version}"/>
+    <dependency org="org.apache.httpcomponents" name="httpcore"
+        rev="${httpcore.version}"/>
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 00f4351..a0ee26e 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -42,6 +42,7 @@
import javax.security.sasl.SaslException;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
@@ -52,6 +53,7 @@
import org.apache.hive.service.cli.thrift.TOpenSessionResp;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
@@ -72,6 +74,7 @@
private static final String HIVE_ANONYMOUS_USER = "anonymous";
private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
+
private TTransport transport;
private TCLIService.Iface client;
private boolean isClosed = true;
@@ -94,7 +97,8 @@ public HiveConnection(String uri, Properties info) throws SQLException {
}
}
- openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+ openTransport(uri, connParams.getHost(), connParams.getPort(),
+ connParams.getSessionVars(), connParams.getHiveConfs());
}
// currently only V1 is supported
@@ -118,39 +122,74 @@ private void configureConnection(Utils.JdbcConnectionParams connParams)
Statement stmt = createStatement();
for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
- stmt.close();
}
+ stmt.close();
}
}
- private void openTransport(String uri, String host, int port, Map<String, String> sessConf )
+ private void openTransport(String uri, String host, int port,
+     Map<String, String> sessConf, Map<String, String> hiveConfVars)
throws SQLException {
- transport = new TSocket(host, port);
- // handle secure connection if specified
- if (!sessConf.containsKey(HIVE_AUTH_TYPE)
- || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+ if(!uri.startsWith(Utils.URL_PREFIX)){
+ throw new SQLException("Unknown transport protocol prefix: " + uri);
+ }
+
+ String serverMode =
+ hiveConfVars.get(HiveConf.ConfVars.HIVE_SERVER2_SERVERMODE.varname);
+
+ if(serverMode != null && (serverMode.equalsIgnoreCase("http") ||
+ serverMode.equalsIgnoreCase("https"))){
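+      // serverMode doubles as the URI scheme below, so "https" is accepted here too.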
+      // httpPath ends up as "/", "/path", or "/path/"
+      String httpPath =
+          hiveConfVars.get(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PATH.varname);
+ if(httpPath == null){
+ httpPath = "/";
+ }
+ if(!httpPath.startsWith("/")){
+ httpPath = "/" + httpPath;
+ }
+
+ DefaultHttpClient httpClient = new DefaultHttpClient();
+ String httpUri = serverMode + "://" + host + ":" + port + httpPath;
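+      // httpUri now looks like e.g. "http://server:10001/hs2"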
+
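+      // Each Thrift call is a separate HTTP POST, so the interceptor attaches an
+      // HTTP Basic "Authorization" header (base64 of "user:password") to every request.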
+ httpClient.addRequestInterceptor(
+ new HttpBasicAuthInterceptor(getUserName(sessConf), getPasswd(sessConf))
+ );
+
try {
- if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
- transport = KerberosSaslHelper.getKerberosTransport(
- sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
- } else {
- String userName = sessConf.get(HIVE_AUTH_USER);
- if ((userName == null) || userName.isEmpty()) {
- userName = HIVE_ANONYMOUS_USER;
- }
- String passwd = sessConf.get(HIVE_AUTH_PASSWD);
- if ((passwd == null) || passwd.isEmpty()) {
- passwd = HIVE_ANONYMOUS_PASSWD;
+ transport = new org.apache.thrift.transport.THttpClient(httpUri, httpClient);
+      } catch (TTransportException tte) {
+        String msg = "Could not establish connection to " +
+            uri + ". " + tte.getMessage();
+        throw new SQLException(msg, "08S01", tte);
+ }
+    } else {
+ transport = new TSocket(host, port);
+
+ // handle secure connection if specified
+ if (!sessConf.containsKey(HIVE_AUTH_TYPE)
+ || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+ try {
+ if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+ transport = KerberosSaslHelper.getKerberosTransport(
+ sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+ } else {
+ transport = PlainSaslHelper.getPlainTransport(
+ getUserName(sessConf),
+ getPasswd(sessConf),
+ transport
+ );
}
- transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
+ } catch (SaslException e) {
+ throw new SQLException("Could not establish secure connection to "
+            + uri + ": " + e.getMessage(), "08S01", e);
}
- } catch (SaslException e) {
- throw new SQLException("Could not establish secure connection to "
- + uri + ": " + e.getMessage(), " 08S01", e);
}
}
+
TProtocol protocol = new TBinaryProtocol(transport);
client = new TCLIService.Client(protocol);
try {
@@ -183,6 +222,43 @@ private void openSession(String uri) throws SQLException {
isClosed = false;
}
+
+
+ /**
+ * @param sessConf
+ * @return username from sessConf map
+ */
+  private String getUserName(Map<String, String> sessConf) {
+ return getSessionValue(sessConf, HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+ }
+
+ /**
+ * @param sessConf
+ * @return password from sessConf map
+ */
+  private String getPasswd(Map<String, String> sessConf) {
+ return getSessionValue(sessConf, HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+ }
+
+ /**
+   * Look up varName in the sessConf map; if the value is null or empty,
+   * return the default value varDefault.
+ * @param sessConf
+ * @param varName
+ * @param varDefault
+   * @return the resolved value
+ */
+  private String getSessionValue(Map<String, String> sessConf,
+ String varName, String varDefault) {
+ String varValue = sessConf.get(varName);
+ if ((varValue == null) || varValue.isEmpty()) {
+ varValue = varDefault;
+ }
+ return varValue;
+ }
+
public void abort(Executor executor) throws SQLException {
// JDK 1.7
throw new SQLException("Method not supported");
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
index 4c4cd85..3b7e32c 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -30,6 +30,9 @@
import java.util.jar.Manifest;
import java.util.logging.Logger;
import java.util.regex.Pattern;
+
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+
/**
* HiveDriver.
*
@@ -50,11 +53,6 @@
private static final boolean JDBC_COMPLIANT = false;
/**
- * The required prefix for the connection URL.
- */
- private static final String URL_PREFIX = "jdbc:hive2://";
-
- /**
* If host is provided, without a port.
*/
private static final String DEFAULT_PORT = "10000";
@@ -99,7 +97,8 @@ public HiveDriver() {
*/
public boolean acceptsURL(String url) throws SQLException {
- return Pattern.matches(URL_PREFIX + ".*", url);
+ return Pattern.matches(Utils.URL_PREFIX + ".*", url);
}
/*
@@ -183,8 +182,8 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException {
info = new Properties();
}
- if ((url != null) && url.startsWith(URL_PREFIX)) {
- info = parseURL(url, info);
+ if ((url != null) && url.startsWith(Utils.URL_PREFIX)) {
+ info = parseURLforGetPropertyInfo(url, info);
}
DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
@@ -222,46 +221,38 @@ public boolean jdbcCompliant() {
/**
* Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and
* parses it. Everything after jdbc:hive// is optional.
- *
+ *
+   * The output from Utils.parseURL() is massaged to fit the needs of getPropertyInfo().
* @param url
* @param defaults
* @return
* @throws java.sql.SQLException
*/
- private Properties parseURL(String url, Properties defaults) throws SQLException {
+ private Properties parseURLforGetPropertyInfo(String url, Properties defaults) throws SQLException {
Properties urlProps = (defaults != null) ? new Properties(defaults)
: new Properties();
- if (url == null || !url.startsWith(URL_PREFIX)) {
+ if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
throw new SQLException("Invalid connection url: " + url);
}
-
- if (url.length() <= URL_PREFIX.length()) {
- return urlProps;
+
+ JdbcConnectionParams params = Utils.parseURL(url);
+ String host = params.getHost();
+ if (host == null){
+ host = "";
}
-
- // [hostname]:[port]/[db_name]
- String connectionInfo = url.substring(URL_PREFIX.length());
-
- // [hostname]:[port] [db_name]
- String[] hostPortAndDatabase = connectionInfo.split("/", 2);
-
- // [hostname]:[port]
- if (hostPortAndDatabase[0].length() > 0) {
- String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
- urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
- if (hostAndPort.length > 1) {
- urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
- } else {
- urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
- }
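+    // Utils.parseURL() reports port 0 when the URL omits it; map that to
+    // DEFAULT_PORT, unless the host is also empty (the bare "jdbc:hive2://" case).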
+ String port = Integer.toString(params.getPort());
+ if(host.equals("")){
+ port = "";
}
-
- // [db_name]
- if (hostPortAndDatabase.length > 1) {
- urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
+ else if(port.equals("0")){
+ port = DEFAULT_PORT;
}
-
+ String db = params.getDbName();
+ urlProps.put(HOST_PROPERTY_KEY, host);
+ urlProps.put(PORT_PROPERTY_KEY, port);
+ urlProps.put(DBNAME_PROPERTY_KEY, db);
+
return urlProps;
}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
new file mode 100644
index 0000000..b1c8c03
--- /dev/null
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
@@ -0,0 +1,34 @@
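+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+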
+package org.apache.hive.jdbc;
+
+import java.io.IOException;
+
+import org.apache.http.Header;
+import org.apache.http.HttpException;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.protocol.HttpContext;
+
+/**
+ * An interceptor that is constructed with a username and password, and adds an
+ * HTTP Basic "Authorization" header carrying those credentials to each request.
+ *
+ */
+public class HttpBasicAuthInterceptor implements HttpRequestInterceptor {
+
+  private Header basicAuthHeader;
+
+  public HttpBasicAuthInterceptor(String username, String password) {
+ if(username != null){
+ UsernamePasswordCredentials creds = new UsernamePasswordCredentials(username, password);
+ basicAuthHeader = BasicScheme.authenticate(creds, "UTF-8", false);
+ }
+ }
+
+ @Override
+ public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException {
+ if(basicAuthHeader != null){
+ httpRequest.addHeader(basicAuthHeader);
+ }
+ }
+}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index d907f0a..886f1fd 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -173,6 +173,9 @@ public static void verifySuccess(TStatus status, boolean withInfo) throws SQLExc
* jdbc:hive://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
* jdbc:hive://ubuntu:11000/db2;user=foo;password=bar
*
+   * Connect to http://server:10001/hs2, with specified basic-auth credentials and initial database:
+   * jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.servermode=http;hive.server2.http.path=hs2
+ *
* Note that currently the session properties are not used.
*
* @param uri
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
index fd990a8..1bb09c2 100644
--- a/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -43,6 +43,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.common.util.HiveVersionInfo;
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
/**
@@ -1115,10 +1116,18 @@ public void testResultSetMetaData() throws SQLException {
// [url] [host] [port] [db]
private static final String[][] URL_PROPERTIES = new String[][] {
+ // tcp mode
{"jdbc:hive2://", "", "", "default"},
{"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
{"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
- {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+ {"jdbc:hive2://foo:1243", "foo", "1243", "default"},
+
+ // http mode
+ {"jdbc:hive2://server:10002/db;user=foo;password=bar?" +
+ "hive.server2.servermode=http;" +
+ "hive.server2.http.path=hs2",
+ "server", "10002", "db"},
+ };
public void testDriverProperties() throws SQLException {
HiveDriver driver = new HiveDriver();
@@ -1130,7 +1139,30 @@ public void testDriverProperties() throws SQLException {
assertDpi(dpi[1], "PORT", testValues[2]);
assertDpi(dpi[2], "DBNAME", testValues[3]);
}
+ }
+ private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
+ {"jdbc:hive2://server:10002/db;" +
+ "user=foo;password=bar?" +
+ "hive.server2.servermode=http;" +
+ "hive.server2.http.path=hs2", "server", "10002", "db", "http", "hs2"},
+ {"jdbc:hive2://server:10000/testdb;" +
+ "user=foo;password=bar?" +
+ "hive.server2.servermode=thrift;" +
+ "hive.server2.http.path=", "server", "10000", "testdb", "thrift", ""},
+ };
+
+ public void testParseUrlHttpMode() throws SQLException {
+ HiveDriver driver = new HiveDriver();
+
+ for (String[] testValues : HTTP_URL_PROPERTIES) {
+ JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+      assertEquals(testValues[1], params.getHost());
+      assertEquals(Integer.parseInt(testValues[2]), params.getPort());
+      assertEquals(testValues[3], params.getDbName());
+      assertEquals(testValues[4], params.getHiveConfs().get("hive.server2.servermode"));
+      assertEquals(testValues[5], params.getHiveConfs().get("hive.server2.http.path"));
+ }
}
private static void assertDpi(DriverPropertyInfo dpi, String name,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index a6e87c4..3a27b7a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -230,6 +230,16 @@ public String getSessionId() {
private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>();
/**
+   * If true, thread-local session state is initialized lazily.
+   * This is used by HiveServer2 in HTTP mode to initialize its worker threads
+   * on first use.
+ */
+ private static boolean lazyInitForHttp = false;
+
+ public static void setLazyInitForHttp(boolean value){
+ lazyInitForHttp = value;
+ }
+
+ /**
* start a new session and set it to current session.
*/
public static SessionState start(HiveConf conf) {
@@ -284,6 +294,17 @@ public static SessionState start(SessionState startSs) {
* get the current session.
*/
public static SessionState get() {
+ SessionState tss1 = tss.get();
+ if(tss1 == null && lazyInitForHttp){
+      // This path should only be taken by HiveServer2 in HTTP mode: there is no
+      // reliable way to initialize the HTTP worker threads up front, so the
+      // session state is created on demand.
+ HiveConf hconf = new HiveConf();
+ SessionState ss = new SessionState(hconf);
+ SessionState.start(ss);
+ tss.set(ss);
+ }
+
return tss.get();
}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java
new file mode 100644
index 0000000..079701d
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java
@@ -0,0 +1,67 @@
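+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+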
+package org.apache.hive.service.cli.thrift;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.codec.binary.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.protocol.TProtocolFactory;
+import org.apache.thrift.server.TServlet;
+
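+/**
+ * A Thrift-over-HTTP servlet: hands POSTed Thrift payloads to TServlet and logs
+ * the HTTP Basic "Authorization" header, if any, of each incoming request.
+ */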
+public class HttpServlet extends TServlet {
+
+ private static final long serialVersionUID = 1L;
+ public static final Log LOG = LogFactory.getLog(HttpServlet.class.getName());
+
+ public HttpServlet(TProcessor processor, TProtocolFactory protocolFactory) {
+ super(processor, protocolFactory);
+ }
+
+ @Override
+ protected void doPost(HttpServletRequest request, HttpServletResponse response)
+ throws ServletException, IOException {
+
+ logRequestHeader(request);
+ super.doPost(request, response);
+ }
+
+ protected void logRequestHeader(HttpServletRequest request){
+
+ String authHeaderBase64 = request.getHeader("Authorization");
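+    // A Basic credential looks like: Authorization: Basic <base64("username:password")>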
+
+ if(authHeaderBase64 == null){
+ LOG.warn("HttpServlet: no HTTP Authorization header");
+ }
+ else {
+ if(!authHeaderBase64.startsWith("Basic")){
+ LOG.warn("HttpServlet: HTTP Authorization header exists but is not Basic.");
+ }
+ else if(LOG.isDebugEnabled()) {
+        String base64Payload = authHeaderBase64.substring("Basic ".length());
+        String authHeaderString = StringUtils.newStringUtf8(
+            Base64.decodeBase64(base64Payload.getBytes()));
+ String[] creds = authHeaderString.split(":");
+ String username=null;
+ String password=null;
+ if(creds.length >= 1 ){
+ username = creds[0];
+ }
+ if(creds.length >= 2){
+ password = creds[1];
+ }
+
+ if(password == null || password.equals("null") || password.equals("")){
+ password = "";
+ }
+ else {
+ password = "******"; // don't log the actual password.
+ }
+
+        LOG.debug("HttpServlet: HTTP Authorization header:: username=" + username +
+            " password=" + password);
+ }
+ }
+ }
+}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 0788ead..e66eba5 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -29,8 +29,11 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.util.Shell;
import org.apache.hive.service.AbstractService;
import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
@@ -41,15 +44,20 @@
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.TableSchema;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.TServer;
+import org.apache.thrift.server.TServlet;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportFactory;
-
-
+import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+import org.mortbay.thread.QueuedThreadPool;
/**
* CLIService.
*
@@ -396,48 +404,162 @@ public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException {
@Override
public void run() {
try {
- hiveAuthFactory = new HiveAuthFactory();
- TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
- TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-
- String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
- if (portString != null) {
- portNum = Integer.valueOf(portString);
- } else {
- portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
- }
- String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
- if (hiveHost == null) {
- hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+ String serverMode = System.getenv("HIVE_SERVER2_SERVERMODE");
+ if(serverMode == null){
+ serverMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_SERVERMODE);
}
+ if(serverMode.equals("thrift")){
- if (hiveHost != null && !hiveHost.isEmpty()) {
- serverAddress = new InetSocketAddress(hiveHost, portNum);
- } else {
- serverAddress = new InetSocketAddress(portNum);
- }
+ hiveAuthFactory = new HiveAuthFactory();
+ TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
+ TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
+
+ String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
+ if (portString != null) {
+ portNum = Integer.valueOf(portString);
+ } else {
+ portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
+ }
+ String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
+ if (hiveHost == null) {
+ hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+ }
- minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
- maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
+ if (hiveHost != null && !hiveHost.isEmpty()) {
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
+ } else {
+ serverAddress = new InetSocketAddress(portNum);
+ }
- TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
- .processorFactory(processorFactory)
- .transportFactory(transportFactory)
- .protocolFactory(new TBinaryProtocol.Factory())
- .minWorkerThreads(minWorkerThreads)
- .maxWorkerThreads(maxWorkerThreads);
+ minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
+ maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
- server = new TThreadPoolServer(sargs);
- LOG.info("ThriftCLIService listening on " + serverAddress);
+ TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
+ .processorFactory(processorFactory)
+ .transportFactory(transportFactory)
+ .protocolFactory(new TBinaryProtocol.Factory())
+ .minWorkerThreads(minWorkerThreads)
+ .maxWorkerThreads(maxWorkerThreads);
- server.serve();
+ server = new TThreadPoolServer(sargs);
+
+ LOG.info("ThriftCLIService listening on " + serverAddress);
+
+ server.serve();
+ }
+      else if (serverMode.equalsIgnoreCase("http")) {
+        // Configure Jetty to serve HTTP requests.
+        // Example of a client connection URI: http://localhost:10000/servlets/thrifths2/
+        // A gateway may cause the actual target URI to differ, e.g.
+        // http://gateway:port/hive2/servlets/thrifths2/
+
+ verifyHttpConfiguration(hiveConf);
+
+ String portString = System.getenv("HIVE_SERVER2_HTTP_PORT");
+ if (portString != null) {
+ portNum = Integer.valueOf(portString);
+ } else {
+ portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT);
+ }
+
+ minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_MIN_WORKER_THREADS);
+ maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_MAX_WORKER_THREADS);
+
+ String httpPath = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PATH);
+ //The config parameter can be like "path", "/path", "/path/", "path/*", "/path1/path2/*" and so on.
+ //httpPath should end up as "/*", "/path/*" or "/path1/../pathN/*"
+ if(httpPath == null || httpPath.equals("")){
+ httpPath = "/*";
+ }
+ else {
+ if(!httpPath.startsWith("/")){
+ httpPath = "/" + httpPath;
+ }
+ if(httpPath.endsWith("/")){
+ httpPath = httpPath + "*";
+ }
+ if(!httpPath.endsWith("/*")){
+ httpPath = httpPath + "/*";
+ }
+ }
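+        // e.g. "hs2", "/hs2", and "/hs2/" all normalize to the servlet mapping "/hs2/*"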
+
+ org.mortbay.jetty.Server httpServer = new org.mortbay.jetty.Server();
+
+ QueuedThreadPool threadPool = new QueuedThreadPool();
+ threadPool.setMinThreads(minWorkerThreads);
+ threadPool.setMaxThreads(maxWorkerThreads);
+ httpServer.setThreadPool(threadPool);
+ SelectChannelConnector connector = new SelectChannelConnector();
+ connector.setPort(portNum);
+
+ connector.setReuseAddress(!Shell.WINDOWS); // Linux:yes, Windows:no
+ httpServer.addConnector(connector);
+
+        TCLIService.Processor<TCLIService.Iface> processor =
+            new TCLIService.Processor<TCLIService.Iface>(new EmbeddedThriftCLIService());
+
+ TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+        TServlet thriftServlet = new HttpServlet(processor, protocolFactory);
+ final Context context = new Context(httpServer, "/", Context.SESSIONS);
+ context.addServlet(new ServletHolder(thriftServlet), httpPath);
+
+        // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyReceiveDuration, etc.
+ // set session state to initialize itself (for the worker threads)
+ SessionState.setLazyInitForHttp(true);
+ httpServer.start();
+ String msg = "Starting HiveServer2 in Http mode on port " + portNum +
+ " path=" + httpPath +
+ " with " + minWorkerThreads + ".." + maxWorkerThreads + " worker threads";
+ HiveServerHandler.LOG.info(msg);
+ httpServer.join();
+ }
+ else {
+ throw new Exception("unknown serverMode: " + serverMode);
+ }
} catch (Throwable t) {
t.printStackTrace();
}
}
+ /**
+   * Verify that the given configuration is supported when running in HTTP mode.
+ * @param hiveConf
+ */
+ private static void verifyHttpConfiguration(HiveConf hiveConf) {
+ String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
+
+    // Error out if KERBEROS, LDAP, or CUSTOM authentication is configured;
+    // none of these is supported in HTTP mode yet.
+ if(authType.equalsIgnoreCase(AuthTypes.KERBEROS.toString()) ||
+ authType.equalsIgnoreCase(AuthTypes.LDAP.toString()) ||
+ authType.equalsIgnoreCase(AuthTypes.CUSTOM.toString())
+ ){
+ String msg = ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting of " +
+ authType + " is currently not supported with " +
+ ConfVars.HIVE_SERVER2_SERVERMODE + " setting of http";
+ LOG.fatal(msg);
+ throw new RuntimeException(msg);
+ }
+
+ if(authType.equalsIgnoreCase(AuthTypes.NONE.toString())){
+      // In thrift mode, NONE still goes through SASL (PLAIN).
+      LOG.warn(ConfVars.HIVE_SERVER2_AUTHENTICATION + " is set to " +
+          authType + ", but SASL is not supported in HTTP mode;" +
+          " behaving as the equivalent of " + AuthTypes.NOSASL);
+ }
+
+    // doAs (impersonation) is currently not supported in HTTP mode.
+ if(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)){
+ String msg = ConfVars.HIVE_SERVER2_ENABLE_DOAS + " setting of " +
+ "true is currently not supported with " +
+ ConfVars.HIVE_SERVER2_SERVERMODE + " setting of http";
+ LOG.fatal(msg);
+ throw new RuntimeException(msg);
+ }
+
+ }
+
}
diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java b/service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java
new file mode 100644
index 0000000..913b379
--- /dev/null
+++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests that HiveServer2 rejects configurations that are not supported in HTTP mode.
+ *
+ */
+public class TestHS2HttpInvalidConf {
+ private static int portNum;
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ portNum = MetaStoreUtils.findFreePort();
+ Class.forName(org.apache.hive.jdbc.HiveDriver.class.getName());
+ }
+
+ private void startHS2WithConf(HiveConf hiveConf)
+ throws SQLException, IOException {
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_SERVERMODE, "http");
+ portNum = MetaStoreUtils.findFreePort();
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT, portNum);
+
+ HiveServer2 hiveServer2 = new HiveServer2();
+ hiveServer2.init(hiveConf);
+ hiveServer2.start();
+
+ }
+
+
+ public void testWithAuthMode(AuthTypes authType) {
+    // Verify that an unsupported auth type prevents the HTTP server from coming up.
+ boolean caughtEx = false;
+ try{
+ HiveConf hconf = new HiveConf();
+ hconf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, authType.toString());
+
+      // Unfortunately, startup cannot throw an exception directly because of
+      // the way the service interfaces are defined; the failure surfaces when
+      // a client tries to connect.
+ startHS2WithConf(hconf);
+
+ String url = "jdbc:hive2://localhost:" + portNum;
+
+      // This should throw an exception, since the server failed to start.
+ DriverManager.getConnection(url, "", "");
+ }catch(SQLException e){
+ caughtEx = true;
+ } catch (IOException e) {
+ //this exception is not expected
+ e.printStackTrace();
+ }
+ assertTrue("exception expected", caughtEx);
+ }
+
+ @Test
+ public void testKerberosMode() {
+ testWithAuthMode(AuthTypes.KERBEROS);
+ }
+
+ @Test
+ public void testLDAPMode() {
+ testWithAuthMode(AuthTypes.LDAP);
+ }
+
+ @Test
+ public void testCustomMode() {
+ testWithAuthMode(AuthTypes.CUSTOM);
+ }
+}
diff --git a/service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java b/service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java
new file mode 100644
index 0000000..20de2cc
--- /dev/null
+++ b/service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests HiveServer2 using Thrift over the HTTP transport.
+ *
+ */
+public class TestHiveServer2Http {
+
+ private static HiveServer2 hiveServer2;
+ private static int portNum;
+ private static final String HTTP_PATH = "hs2path";
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ HiveConf hiveConf = new HiveConf();
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_SERVERMODE, "http");
+ portNum = MetaStoreUtils.findFreePort();
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT, portNum);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_HTTP_PATH, HTTP_PATH);
+ hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+ hiveServer2 = new HiveServer2();
+ hiveServer2.init(hiveConf);
+ hiveServer2.start();
+    Thread.sleep(1000); // give the embedded server a moment to start listening
+
+ Class.forName(org.apache.hive.jdbc.HiveDriver.class.getName());
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ if (hiveServer2 != null) {
+ hiveServer2.stop();
+ }
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void testPositive() throws SQLException {
+ test("http", HTTP_PATH);
+
+ }
+
+ private void test(String serverMode, String httpPath) throws SQLException {
+ String url = "jdbc:hive2://localhost:" + portNum
+ + "/default?"
+ + ConfVars.HIVE_SERVER2_SERVERMODE + "=" + serverMode
+ + ";"
+ + ConfVars.HIVE_SERVER2_HTTP_PATH + "=" + httpPath
+ ;
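+    // e.g. "jdbc:hive2://localhost:12345/default?hive.server2.servermode=http;hive.server2.http.path=hs2path"
+    // (ConfVars.toString() returns the variable name, e.g. "hive.server2.servermode")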
+
+ Connection con1 = DriverManager.getConnection(url, "", "");
+
+ assertNotNull("Connection is null", con1);
+ assertFalse("Connection should not be closed", con1.isClosed());
+
+ Statement stmt = con1.createStatement();
+ assertNotNull("Statement is null", stmt);
+
+ stmt.execute("show databases");
+
+ ResultSet res = stmt.getResultSet();
+ assertTrue("has at least one database", res.next());
+
+ stmt.close();
+ }
+
+ @Test
+ public void testInvalidPath() throws SQLException {
+ //test that invalid http path results in exception
+ boolean caughtEx = false;
+ try{
+ test("http", "invalidPath");
+ }catch(SQLException e){
+ caughtEx = true;
+ }
+ assertTrue("exception expected", caughtEx);
+
+ }
+
+  // Disabled test: it resulted in an OOM.
+  // TODO: investigate why the OOM happened before re-enabling.
+ public void testIncorrectMode() throws SQLException {
+ //test that trying to connect using thrift results in exception
+ boolean caughtEx = false;
+ try{
+ test("thrift", "invalidPath");
+ }catch(SQLException e){
+ caughtEx = true;
+ }
+ assertTrue("exception expected", caughtEx);
+
+ }
+
+}