diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index cc775d9..563fc6d 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -714,6 +714,12 @@
     HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
     HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
 
+    HIVE_SERVER2_SERVERMODE("hive.server2.servermode", "thrift"), // "thrift" or "http"
+    HIVE_SERVER2_HTTP_PORT("hive.server2.http.port", 10000),
+    HIVE_SERVER2_HTTP_PATH("hive.server2.http.path", ""),
+    HIVE_SERVER2_HTTP_MIN_WORKER_THREADS("hive.server2.http.min.worker.threads", 5),
+    HIVE_SERVER2_HTTP_MAX_WORKER_THREADS("hive.server2.http.max.worker.threads", 100),
+    HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
     HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
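As a usage note (not part of the patch): the new ConfVars can also be set programmatically, e.g. when embedding HiveServer2 in a test. A minimal sketch; the port, path, and thread-count values here are arbitrary examples:

    // Sketch only: enabling HTTP mode via the new ConfVars.
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_SERVERMODE, "http");
    conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PORT, 10001);      // example port
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PATH, "hs2");         // example path
    conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_HTTP_MAX_WORKER_THREADS, 50);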
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index 5de5965..d337c3f 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -887,7 +887,36 @@
     The last record before the end of stream can have less than
     hive.binary.record.max.length bytes</description>
 </property>
 
+<property>
+  <name>hive.server2.servermode</name>
+  <value>thrift</value>
+  <description>Server endpoint mode: "thrift" or "http".</description>
+</property>
+
+<property>
+  <name>hive.server2.http.port</name>
+  <value>10000</value>
+  <description>Port number when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.path</name>
+  <value>hs2</value>
+  <description>Path component of the URL endpoint when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.min.worker.threads</name>
+  <value>5</value>
+  <description>Minimum number of worker threads when in HTTP mode.</description>
+</property>
+
+<property>
+  <name>hive.server2.http.max.worker.threads</name>
+  <value>100</value>
+  <description>Maximum number of worker threads when in HTTP mode.</description>
+</property>
+
 <property>
   <name>hive.script.recordreader</name>
@@ -1732,6 +1761,8 @@
 </property>
 
+
+
 <property>
   <name>hive.hmshandler.retry.attempts</name>
   <value>1</value>
diff --git eclipse-templates/.classpath eclipse-templates/.classpath
index 81b0d06..1a88e2a 100644
--- eclipse-templates/.classpath
+++ eclipse-templates/.classpath
@@ -76,6 +76,8 @@
+  <classpathentry kind="lib" path="build/ivy/lib/default/httpclient-4.2.5.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/httpcore-4.2.4.jar"/>
diff --git ivy/libraries.properties ivy/libraries.properties
index 6e25064..c6d10dc 100644
--- ivy/libraries.properties
+++ ivy/libraries.properties
@@ -45,6 +45,8 @@ commons-pool.version=1.5.4
 derby.version=10.4.2.0
 guava.version=11.0.2
 hbase.version=0.94.6.1
+httpclient.version=4.2.5
+httpcore.version=4.2.4
 jackson.version=1.8.8
 javaewah.version=0.3.2
 jdo-api.version=2.3-ec
@@ -69,3 +71,4 @@ velocity.version=1.5
 metrics-core.version=2.1.2
 zookeeper.version=3.4.3
 javolution.version=5.5.1
+
diff --git jdbc/ivy.xml jdbc/ivy.xml
index 2bf78a6..b9d0cea 100644
--- jdbc/ivy.xml
+++ jdbc/ivy.xml
@@ -29,5 +29,10 @@
+    <dependency org="org.apache.httpcomponents" name="httpclient"
+                rev="${httpclient.version}" transitive="false"/>
+    <dependency org="org.apache.httpcomponents" name="httpcore"
+                rev="${httpcore.version}" transitive="false"/>
+
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 0e90fec..12d268f 100644
--- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -41,6 +41,7 @@
 import javax.security.sasl.SaslException;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
@@ -51,6 +52,7 @@
 import org.apache.hive.service.cli.thrift.TOpenSessionResp;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
@@ -71,6 +73,7 @@
   private static final String HIVE_ANONYMOUS_USER = "anonymous";
   private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
 
+
   private TTransport transport;
   private TCLIService.Iface client;
   private boolean isClosed = true;
@@ -93,7 +96,8 @@ public HiveConnection(String uri, Properties info) throws SQLException {
       }
     }
 
-    openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+    openTransport(uri, connParams.getHost(), connParams.getPort(),
+        connParams.getSessionVars(), connParams.getHiveConfs());
 
     // currently only V1 is supported
@@ -117,39 +121,74 @@ private void configureConnection(Utils.JdbcConnectionParams connParams)
       Statement stmt = createStatement();
       for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
         stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
-        stmt.close();
       }
+      stmt.close();
     }
   }
 
-  private void openTransport(String uri, String host, int port, Map<String, String> sessConf)
+  private void openTransport(String uri, String host, int port,
+      Map<String, String> sessConf, Map<String, String> hiveConfVars)
       throws SQLException {
-    transport = new TSocket(host, port);
-    // handle secure connection if specified
-    if (!sessConf.containsKey(HIVE_AUTH_TYPE)
-        || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+    if (!uri.startsWith(Utils.URL_PREFIX)) {
+      throw new SQLException("Unknown transport protocol prefix: " + uri);
+    }
+
+    String serverMode =
+        hiveConfVars.get(HiveConf.ConfVars.HIVE_SERVER2_SERVERMODE.varname);
+
+    if (serverMode != null && (serverMode.equalsIgnoreCase("http")
+        || serverMode.equalsIgnoreCase("https"))) {
+      // httpPath should end up like "/" or "/path" or "/path/"
+      String httpPath =
+          hiveConfVars.get(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PATH.varname);
+      if (httpPath == null) {
+        httpPath = "/";
+      }
+      if (!httpPath.startsWith("/")) {
+        httpPath = "/" + httpPath;
+      }
+
+      DefaultHttpClient httpClient = new DefaultHttpClient();
+      String httpUri = serverMode + "://" + host + ":" + port + httpPath;
+
+      httpClient.addRequestInterceptor(
+          new HttpBasicAuthInterceptor(getUserName(sessConf), getPasswd(sessConf)));
+
       try {
-        if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
-          transport = KerberosSaslHelper.getKerberosTransport(
-              sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
-        } else {
-          String userName = sessConf.get(HIVE_AUTH_USER);
-          if ((userName == null) || userName.isEmpty()) {
-            userName = HIVE_ANONYMOUS_USER;
-          }
-          String passwd = sessConf.get(HIVE_AUTH_PASSWD);
-          if ((passwd == null) || passwd.isEmpty()) {
-            passwd = HIVE_ANONYMOUS_PASSWD;
+        transport = new org.apache.thrift.transport.THttpClient(httpUri, httpClient);
+      } catch (TTransportException tte) {
+        String msg = "Could not establish connection to "
+            + uri + ". " + tte.getMessage();
+        throw new SQLException(msg, "08S01", tte);
+      }
+    } else {
+      transport = new TSocket(host, port);
+
+      // handle secure connection if specified
+      if (!sessConf.containsKey(HIVE_AUTH_TYPE)
+          || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
+        try {
+          if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+            transport = KerberosSaslHelper.getKerberosTransport(
+                sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport);
+          } else {
+            transport = PlainSaslHelper.getPlainTransport(
+                getUserName(sessConf), getPasswd(sessConf), transport);
           }
-          transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
+        } catch (SaslException e) {
+          throw new SQLException("Could not establish secure connection to "
+              + uri + ": " + e.getMessage(), "08S01", e);
         }
-      } catch (SaslException e) {
-        throw new SQLException("Could not establish secure connection to "
-            + uri + ": " + e.getMessage(), " 08S01", e);
       }
     }
+
     TProtocol protocol = new TBinaryProtocol(transport);
     client = new TCLIService.Client(protocol);
     try {
@@ -160,6 +199,41 @@ private void openTransport(String uri, String host, int port,
   }
 
+  /**
+   * @param sessConf
+   * @return username from sessConf map, or the anonymous default if unset
+   */
+  private String getUserName(Map<String, String> sessConf) {
+    return getSessionValue(sessConf, HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+  }
+
+  /**
+   * @param sessConf
+   * @return password from sessConf map, or the anonymous default if unset
+   */
+  private String getPasswd(Map<String, String> sessConf) {
+    return getSessionValue(sessConf, HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+  }
+
+  /**
+   * Look up varName in the sessConf map; if it is null or empty, return the
+   * default value varDefault.
+   * @param sessConf
+   * @param varName
+   * @param varDefault
+   * @return the configured value, or varDefault
+   */
+  private String getSessionValue(Map<String, String> sessConf,
+      String varName, String varDefault) {
+    String varValue = sessConf.get(varName);
+    if ((varValue == null) || varValue.isEmpty()) {
+      varValue = varDefault;
+    }
+    return varValue;
+  }
+
   private void openSession(String uri) throws SQLException {
     TOpenSessionReq openReq = new TOpenSessionReq();
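For orientation, here is a minimal standalone sketch of the client-side wiring openTransport() now performs in HTTP mode. The class name, endpoint URL, and credentials are illustrative, and it assumes a server from this patch is listening:

    import org.apache.http.impl.client.DefaultHttpClient;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.THttpClient;
    import org.apache.hive.service.cli.thrift.TCLIService;

    public class HttpTransportSketch {
      public static void main(String[] args) throws Exception {
        DefaultHttpClient httpClient = new DefaultHttpClient();
        // Same interceptor the patch adds: sends "Authorization: Basic ..." on every request.
        httpClient.addRequestInterceptor(
            new org.apache.hive.jdbc.HttpBasicAuthInterceptor("anonymous", "anonymous"));
        THttpClient transport =
            new THttpClient("http://localhost:10000/hs2", httpClient);
        TCLIService.Client client =
            new TCLIService.Client(new TBinaryProtocol(transport));
        transport.open(); // THttpClient defers real I/O to flush(), but keeps the TTransport contract
        // client.OpenSession(...) etc. would follow, as HiveConnection does.
      }
    }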
diff --git jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
index 2576914..68e027c 100644
--- jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
+++ jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -28,6 +28,9 @@
 import java.util.jar.Attributes;
 import java.util.jar.Manifest;
 import java.util.regex.Pattern;
+
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+
 /**
  * HiveDriver.
  *
@@ -48,11 +51,6 @@
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
-   * The required prefix for the connection URL.
-   */
-  private static final String URL_PREFIX = "jdbc:hive2://";
-
-  /**
    * If host is provided, without a port.
    */
   private static final String DEFAULT_PORT = "10000";
@@ -97,7 +95,8 @@ public HiveDriver() {
    */
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches(URL_PREFIX + ".*", url);
+
+    return Pattern.matches(Utils.URL_PREFIX + ".*", url);
   }
 
@@ -176,8 +175,8 @@ public int getMinorVersion() {
       info = new Properties();
     }
 
-    if ((url != null) && url.startsWith(URL_PREFIX)) {
-      info = parseURL(url, info);
+    if ((url != null) && url.startsWith(Utils.URL_PREFIX)) {
+      info = parseURLforGetPropertyInfo(url, info);
     }
 
     DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
@@ -215,46 +214,38 @@ public boolean jdbcCompliant() {
   /**
    * Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and
    * parses it. Everything after jdbc:hive// is optional.
-   *
+   *
+   * The output from Utils.parseURL() is massaged for the needs of getPropertyInfo().
    * @param url
    * @param defaults
    * @return
    * @throws java.sql.SQLException
    */
-  private Properties parseURL(String url, Properties defaults) throws SQLException {
+  private Properties parseURLforGetPropertyInfo(String url, Properties defaults)
+      throws SQLException {
     Properties urlProps = (defaults != null) ? new Properties(defaults)
         : new Properties();
 
-    if (url == null || !url.startsWith(URL_PREFIX)) {
+    if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
       throw new SQLException("Invalid connection url: " + url);
     }
-
-    if (url.length() <= URL_PREFIX.length()) {
-      return urlProps;
+
+    JdbcConnectionParams params = Utils.parseURL(url);
+    String host = params.getHost();
+    if (host == null) {
+      host = "";
     }
-
-    // [hostname]:[port]/[db_name]
-    String connectionInfo = url.substring(URL_PREFIX.length());
-
-    // [hostname]:[port] [db_name]
-    String[] hostPortAndDatabase = connectionInfo.split("/", 2);
-
-    // [hostname]:[port]
-    if (hostPortAndDatabase[0].length() > 0) {
-      String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
-      urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
-      if (hostAndPort.length > 1) {
-        urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
-      } else {
-        urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
-      }
+    String port = Integer.toString(params.getPort());
+    if (host.equals("")) {
+      port = "";
     }
-
-    // [db_name]
-    if (hostPortAndDatabase.length > 1) {
-      urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
+    else if (port.equals("0")) {
+      port = DEFAULT_PORT;
     }
-
+    String db = params.getDbName();
+    urlProps.put(HOST_PROPERTY_KEY, host);
+    urlProps.put(PORT_PROPERTY_KEY, port);
+    urlProps.put(DBNAME_PROPERTY_KEY, db);
+
     return urlProps;
   }
diff --git jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
new file mode 100644
index 0000000..b1c8c03
--- /dev/null
+++ jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
@@ -0,0 +1,34 @@
+package org.apache.hive.jdbc;
+
+import java.io.IOException;
+
+import org.apache.http.Header;
+import org.apache.http.HttpException;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.protocol.HttpContext;
+
+/**
+ * The class is instantiated with a username and password; it is then used to
+ * add a Basic auth header carrying those credentials to each HTTP request.
+ */
+public class HttpBasicAuthInterceptor implements HttpRequestInterceptor {
+
+  private final Header basicAuthHeader;
+
+  public HttpBasicAuthInterceptor(String username, String password) {
+    if (username != null) {
+      UsernamePasswordCredentials creds =
+          new UsernamePasswordCredentials(username, password);
+      basicAuthHeader = BasicScheme.authenticate(creds, "UTF-8", false);
+    } else {
+      basicAuthHeader = null;
+    }
+  }
+
+  @Override
+  public void process(HttpRequest httpRequest, HttpContext httpContext)
+      throws HttpException, IOException {
+    if (basicAuthHeader != null) {
+      httpRequest.addHeader(basicAuthHeader);
+    }
+  }
+}
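Illustration only (not part of the patch): the interceptor attaches a standard RFC 2617 Basic header. Using the same httpclient classes imported above, for user "foo" and password "bar":

    // What BasicScheme.authenticate() produces.
    Header h = BasicScheme.authenticate(
        new UsernamePasswordCredentials("foo", "bar"), "UTF-8", false);
    System.out.println(h.getName() + ": " + h.getValue());
    // prints: Authorization: Basic Zm9vOmJhcg==   (base64 of "foo:bar")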
diff --git jdbc/src/java/org/apache/hive/jdbc/Utils.java jdbc/src/java/org/apache/hive/jdbc/Utils.java
index 120c890..1c2fb86 100644
--- jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -173,6 +173,9 @@ public static void verifySuccess(TStatus status, boolean withInfo) throws SQLExc
    * jdbc:hive://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
    * jdbc:hive://ubuntu:11000/db2;user=foo;password=bar
    *
+   * Connect to http://server:10001/hs2, with the specified Basic auth credentials and initial database:
+   * jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.servermode=http;hive.server2.http.path=hs2
+   *
    * Note that currently the session properties are not used.
    *
    * @param uri
@@ -190,9 +193,10 @@ public static JdbcConnectionParams parseURL(String uri) throws IllegalArgumentEx
       connParams.setEmbeddedMode(true);
       return connParams;
     }
     URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
 
     connParams.setHost(jdbcURI.getHost());
+
     if (connParams.getHost() == null) {
       connParams.setEmbeddedMode(true);
     } else {
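A hedged usage sketch of the new URL form from a plain JDBC client (server name, port, database, and path are illustrative):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class HttpJdbcSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // hive.server2.servermode/http.path select the HTTP transport added by this patch.
        Connection conn = DriverManager.getConnection(
            "jdbc:hive2://server:10001/db;user=foo;password=bar"
                + "?hive.server2.servermode=http;hive.server2.http.path=hs2");
        Statement stmt = conn.createStatement();
        stmt.execute("show databases");
        stmt.close();
        conn.close();
      }
    }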
+ + "hive.server2.servermode=http;" + + "hive.server2.http.path=hs2", + "server", "10002", "db"}, + }; public void testDriverProperties() throws SQLException { HiveDriver driver = new HiveDriver(); @@ -1095,7 +1104,30 @@ public void testDriverProperties() throws SQLException { assertDpi(dpi[1], "PORT", testValues[2]); assertDpi(dpi[2], "DBNAME", testValues[3]); } + } + private static final String[][] HTTP_URL_PROPERTIES = new String[][] { + {"jdbc:hive2://server:10002/db;" + + "user=foo;password=bar?" + + "hive.server2.servermode=http;" + + "hive.server2.http.path=hs2", "server", "10002", "db", "http", "hs2"}, + {"jdbc:hive2://server:10000/testdb;" + + "user=foo;password=bar?" + + "hive.server2.servermode=thrift;" + + "hive.server2.http.path=", "server", "10000", "testdb", "thrift", ""}, + }; + + public void testParseUrlHttpMode() throws SQLException { + HiveDriver driver = new HiveDriver(); + + for (String[] testValues : HTTP_URL_PROPERTIES) { + JdbcConnectionParams params = Utils.parseURL(testValues[0]); + assertEquals(params.getHost(), testValues[1]); + assertEquals(params.getPort(), Integer.parseInt(testValues[2])); + assertEquals(params.getDbName(), testValues[3]); + assertEquals(params.getHiveConfs().get("hive.server2.servermode"), testValues[4]); + assertEquals(params.getHiveConfs().get("hive.server2.http.path"), testValues[5]); + } } private static void assertDpi(DriverPropertyInfo dpi, String name, diff --git service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java new file mode 100644 index 0000000..079701d --- /dev/null +++ service/src/java/org/apache/hive/service/cli/thrift/HttpServlet.java @@ -0,0 +1,67 @@ +package org.apache.hive.service.cli.thrift; + +import java.io.IOException; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.thrift.TProcessor; +import org.apache.thrift.protocol.TProtocolFactory; +import org.apache.thrift.server.TServlet; + +public class HttpServlet extends TServlet { + + private static final long serialVersionUID = 1L; + public static final Log LOG = LogFactory.getLog(HttpServlet.class.getName()); + + public HttpServlet(TProcessor processor, TProtocolFactory protocolFactory) { + super(processor, protocolFactory); + } + + @Override + protected void doPost(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + + logRequestHeader(request); + super.doPost(request, response); + } + + protected void logRequestHeader(HttpServletRequest request){ + + String authHeaderBase64 = request.getHeader("Authorization"); + + if(authHeaderBase64 == null){ + LOG.warn("HttpServlet: no HTTP Authorization header"); + } + else { + if(!authHeaderBase64.startsWith("Basic")){ + LOG.warn("HttpServlet: HTTP Authorization header exists but is not Basic."); + } + else if(LOG.isDebugEnabled()) { + String authHeaderBase64_Payload = authHeaderBase64.substring("Basic ".length()); + String authHeaderString = org.apache.commons.codec.binary.StringUtils.newStringUtf8(org.apache.commons.codec.binary.Base64.decodeBase64(authHeaderBase64_Payload.getBytes())); + String[] creds = authHeaderString.split(":"); + String username=null; + String password=null; + if(creds.length >= 1 ){ + username = creds[0]; + } + if(creds.length >= 2){ + password = creds[1]; + } + + if(password == null 
|| password.equals("null") || password.equals("")){ + password = ""; + } + else { + password = "******"; // don't log the actual password. + } + + LOG.debug("HttpServlet: HTTP Authorization header:: username=" + username + " password=" + password); + } + } + } +} diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 0788ead..6bba57f 100644 --- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -29,8 +29,11 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.service.HiveServer.HiveServerHandler; +import org.apache.hadoop.util.Shell; import org.apache.hive.service.AbstractService; import org.apache.hive.service.auth.HiveAuthFactory; +import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes; import org.apache.hive.service.cli.CLIService; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.GetInfoType; @@ -44,12 +47,16 @@ import org.apache.thrift.TException; import org.apache.thrift.TProcessorFactory; import org.apache.thrift.protocol.TBinaryProtocol; +import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.server.TServer; +import org.apache.thrift.server.TServlet; import org.apache.thrift.server.TThreadPoolServer; import org.apache.thrift.transport.TServerSocket; import org.apache.thrift.transport.TTransportFactory; - - +import org.mortbay.jetty.nio.SelectChannelConnector; +import org.mortbay.jetty.servlet.Context; +import org.mortbay.jetty.servlet.ServletHolder; +import org.mortbay.thread.QueuedThreadPool; /** * CLIService. 
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 0788ead..6bba57f 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -29,8 +29,11 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.service.HiveServer.HiveServerHandler;
+import org.apache.hadoop.util.Shell;
 import org.apache.hive.service.AbstractService;
 import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.FetchOrientation;
 import org.apache.hive.service.cli.GetInfoType;
@@ -44,12 +47,16 @@
 import org.apache.thrift.TException;
 import org.apache.thrift.TProcessorFactory;
 import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocolFactory;
 import org.apache.thrift.server.TServer;
+import org.apache.thrift.server.TServlet;
 import org.apache.thrift.server.TThreadPoolServer;
 import org.apache.thrift.transport.TServerSocket;
 import org.apache.thrift.transport.TTransportFactory;
-
-
+import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+import org.mortbay.thread.QueuedThreadPool;
 
 /**
  * CLIService.
  *
@@ -396,48 +403,160 @@ public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException {
 
   @Override
   public void run() {
     try {
-      hiveAuthFactory = new HiveAuthFactory();
-      TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
-      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
-      }
-      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
-      if (hiveHost == null) {
-        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+      String serverMode = System.getenv("HIVE_SERVER2_SERVERMODE");
+      if (serverMode == null) {
+        serverMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_SERVERMODE);
       }
+      if (serverMode.equals("thrift")) {
 
-      if (hiveHost != null && !hiveHost.isEmpty()) {
-        serverAddress = new InetSocketAddress(hiveHost, portNum);
-      } else {
-        serverAddress = new InetSocketAddress(portNum);
-      }
+        hiveAuthFactory = new HiveAuthFactory();
+        TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
+        TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
+
+        String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
+        if (portString != null) {
+          portNum = Integer.valueOf(portString);
+        } else {
+          portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
+        }
+        String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
+        if (hiveHost == null) {
+          hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+        }
 
-      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
-      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
+        if (hiveHost != null && !hiveHost.isEmpty()) {
+          serverAddress = new InetSocketAddress(hiveHost, portNum);
+        } else {
+          serverAddress = new InetSocketAddress(portNum);
+        }
 
-      TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
-      .processorFactory(processorFactory)
-      .transportFactory(transportFactory)
-      .protocolFactory(new TBinaryProtocol.Factory())
-      .minWorkerThreads(minWorkerThreads)
-      .maxWorkerThreads(maxWorkerThreads);
+        minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
+        maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
 
-      server = new TThreadPoolServer(sargs);
-      LOG.info("ThriftCLIService listening on " + serverAddress);
+        TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
+            .processorFactory(processorFactory)
+            .transportFactory(transportFactory)
+            .protocolFactory(new TBinaryProtocol.Factory())
+            .minWorkerThreads(minWorkerThreads)
+            .maxWorkerThreads(maxWorkerThreads);
 
-      server.serve();
+        server = new TThreadPoolServer(sargs);
+
+        LOG.info("ThriftCLIService listening on " + serverAddress);
+
+        server.serve();
+      }
+      else if (serverMode.equals("http")) {
+        // Configure Jetty to serve HTTP requests.
+        // Example client connection URI: http://localhost:10000/servlets/thrifths2/
+        // A gateway may cause the actual target URI to differ, e.g.
+        // http://gateway:port/hive2/servlets/thrifths2/
+
+        verifyHttpConfiguration(hiveConf);
+
+        String portString = System.getenv("HIVE_SERVER2_HTTP_PORT");
+        if (portString != null) {
+          portNum = Integer.valueOf(portString);
+        } else {
+          portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT);
+        }
+
+        minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_MIN_WORKER_THREADS);
+        maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HTTP_MAX_WORKER_THREADS);
+
+        String httpPath = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_HTTP_PATH);
+        // The config parameter can be like "path", "/path", "/path/", "path/*",
+        // "/path1/path2/*" and so on. httpPath should end up as "/*", "/path/*"
+        // or "/path1/../pathN/*".
+        if (httpPath == null || httpPath.equals("")) {
+          httpPath = "/*";
+        } else {
+          if (!httpPath.startsWith("/")) {
+            httpPath = "/" + httpPath;
+          }
+          if (httpPath.endsWith("/")) {
+            httpPath = httpPath + "*";
+          }
+          if (!httpPath.endsWith("/*")) {
+            httpPath = httpPath + "/*";
+          }
+        }
+
+        org.mortbay.jetty.Server httpServer = new org.mortbay.jetty.Server();
+
+        QueuedThreadPool threadPool = new QueuedThreadPool();
+        threadPool.setMinThreads(minWorkerThreads);
+        threadPool.setMaxThreads(maxWorkerThreads);
+        httpServer.setThreadPool(threadPool);
+
+        SelectChannelConnector connector = new SelectChannelConnector();
+        connector.setPort(portNum);
+        connector.setReuseAddress(!Shell.WINDOWS); // Linux: yes, Windows: no
+        httpServer.addConnector(connector);
+
+        org.apache.hive.service.cli.thrift.TCLIService.Processor processor =
+            new org.apache.hive.service.cli.thrift.TCLIService.Processor(
+                new EmbeddedThriftCLIService());
+
+        TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+        TServlet thriftServlet = new HttpServlet(processor, protocolFactory);
+        final Context context = new Context(httpServer, "/", Context.SESSIONS);
+        context.addServlet(new ServletHolder(thriftServlet), httpPath);
+
+        // TODO: check defaults: maxTimeout, keepalive, maxBodySize,
+        // bodyReceiveDuration, etc.
+        httpServer.start();
+        String msg = "Starting HiveServer2 in HTTP mode on port " + portNum
+            + " path=" + httpPath + " with " + minWorkerThreads + ".."
+            + maxWorkerThreads + " worker threads";
+        HiveServerHandler.LOG.info(msg);
+        httpServer.join();
+      }
+      else {
+        throw new Exception("unknown server mode: " + serverMode);
+      }
     } catch (Throwable t) {
       t.printStackTrace();
     }
   }
 
+  /**
+   * Verify that this configuration is supported by server mode "http".
+   * @param hiveConf
+   */
+  private static void verifyHttpConfiguration(HiveConf hiveConf) {
+    String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
+
+    // error out if KERBEROS, LDAP or CUSTOM mode is being used; they are not supported
+    if (authType.equalsIgnoreCase(AuthTypes.KERBEROS.toString())
+        || authType.equalsIgnoreCase(AuthTypes.LDAP.toString())
+        || authType.equalsIgnoreCase(AuthTypes.CUSTOM.toString())) {
+      String msg = ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting of "
+          + authType + " is currently not supported with "
+          + ConfVars.HIVE_SERVER2_SERVERMODE + " setting of http";
+      LOG.fatal(msg);
+      throw new RuntimeException(msg);
+    }
+
+    if (authType.equalsIgnoreCase(AuthTypes.NONE.toString())) {
+      // NONE in thrift mode uses SASL
+      LOG.warn(ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting of "
+          + authType + ". SASL is not supported with http servermode,"
+          + " so using the equivalent of " + AuthTypes.NOSASL);
+    }
+
+    // doAs is currently not supported with http
+    if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
+      String msg = ConfVars.HIVE_SERVER2_ENABLE_DOAS + " setting of "
+          + "true is currently not supported with "
+          + ConfVars.HIVE_SERVER2_SERVERMODE + " setting of http";
+      LOG.fatal(msg);
+      throw new RuntimeException(msg);
+    }
+  }
+
 }
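To make the servlet-path normalization above concrete, here is the same logic as an isolated sketch (normalizeHttpPath is our name for it, not part of the patch):

    // Sketch of the normalization rules applied to hive.server2.http.path.
    static String normalizeHttpPath(String p) {
      if (p == null || p.isEmpty()) {
        return "/*";                 // no path configured: serve everything
      }
      if (!p.startsWith("/")) {
        p = "/" + p;                 // "path"   -> "/path"
      }
      if (p.endsWith("/")) {
        p = p + "*";                 // "/path/" -> "/path/*"
      }
      if (!p.endsWith("/*")) {
        p = p + "/*";                // "/path"  -> "/path/*"
      }
      return p;
    }
    // normalizeHttpPath("hs2")   -> "/hs2/*"
    // normalizeHttpPath("/a/b/") -> "/a/b/*"
    // normalizeHttpPath("")      -> "/*"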
diff --git service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java
new file mode 100644
index 0000000..913b379
--- /dev/null
+++ service/src/test/org/apache/hive/service/server/TestHS2HttpInvalidConf.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests that configurations unsupported in HTTP mode cause HiveServer2
+ * connections to fail.
+ */
+public class TestHS2HttpInvalidConf {
+  private static int portNum;
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    portNum = MetaStoreUtils.findFreePort();
+    Class.forName(org.apache.hive.jdbc.HiveDriver.class.getName());
+  }
+
+  private void startHS2WithConf(HiveConf hiveConf)
+      throws SQLException, IOException {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_SERVERMODE, "http");
+    portNum = MetaStoreUtils.findFreePort();
+    hiveConf.setIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT, portNum);
+
+    HiveServer2 hiveServer2 = new HiveServer2();
+    hiveServer2.init(hiveConf);
+    hiveServer2.start();
+  }
+
+  public void testWithAuthMode(AuthTypes authType) {
+    // test that an unsupported auth mode results in an exception
+    boolean caughtEx = false;
+    try {
+      HiveConf hconf = new HiveConf();
+      hconf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, authType.toString());
+
+      // unfortunately, startup can't throw an exception directly,
+      // because of the way the service interfaces are defined
+      startHS2WithConf(hconf);
+
+      String url = "jdbc:hive2://localhost:" + portNum;
+
+      // this should throw an exception, as the server will be down
+      DriverManager.getConnection(url, "", "");
+    } catch (SQLException e) {
+      caughtEx = true;
+    } catch (IOException e) {
+      // this exception is not expected
+      e.printStackTrace();
+    }
+    assertTrue("exception expected", caughtEx);
+  }
+
+  @Test
+  public void testKerberosMode() {
+    testWithAuthMode(AuthTypes.KERBEROS);
+  }
+
+  @Test
+  public void testLDAPMode() {
+    testWithAuthMode(AuthTypes.LDAP);
+  }
+
+  @Test
+  public void testCustomMode() {
+    testWithAuthMode(AuthTypes.CUSTOM);
+  }
+}
diff --git service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java
new file mode 100644
index 0000000..20de2cc
--- /dev/null
+++ service/src/test/org/apache/hive/service/server/TestHiveServer2Http.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.server;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test HiveServer2 using Thrift over HTTP transport.
+ */
+public class TestHiveServer2Http {
+
+  private static HiveServer2 hiveServer2;
+  private static int portNum;
+  private static final String HTTP_PATH = "hs2path";
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    HiveConf hiveConf = new HiveConf();
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_SERVERMODE, "http");
+    portNum = MetaStoreUtils.findFreePort();
+    hiveConf.setIntVar(ConfVars.HIVE_SERVER2_HTTP_PORT, portNum);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_HTTP_PATH, HTTP_PATH);
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+    hiveServer2 = new HiveServer2();
+    hiveServer2.init(hiveConf);
+    hiveServer2.start();
+    Thread.sleep(1000);
+
+    Class.forName(org.apache.hive.jdbc.HiveDriver.class.getName());
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    if (hiveServer2 != null) {
+      hiveServer2.stop();
+    }
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  @Test
+  public void testPositive() throws SQLException {
+    test("http", HTTP_PATH);
+  }
+
+  private void test(String serverMode, String httpPath) throws SQLException {
+    String url = "jdbc:hive2://localhost:" + portNum + "/default?"
+        + ConfVars.HIVE_SERVER2_SERVERMODE + "=" + serverMode + ";"
+        + ConfVars.HIVE_SERVER2_HTTP_PATH + "=" + httpPath;
+
+    Connection con1 = DriverManager.getConnection(url, "", "");
+
+    assertNotNull("Connection is null", con1);
+    assertFalse("Connection should not be closed", con1.isClosed());
+
+    Statement stmt = con1.createStatement();
+    assertNotNull("Statement is null", stmt);
+
+    stmt.execute("show databases");
+
+    ResultSet res = stmt.getResultSet();
+    assertTrue("expected at least one database", res.next());
+
+    stmt.close();
+  }
+
+  @Test
+  public void testInvalidPath() throws SQLException {
+    // test that an invalid http path results in an exception
+    boolean caughtEx = false;
+    try {
+      test("http", "invalidPath");
+    } catch (SQLException e) {
+      caughtEx = true;
+    }
+    assertTrue("exception expected", caughtEx);
+  }
+
+  // Disabled test: resulted in an OOM exception.
+  // TODO: investigate why the OOM happened.
+  public void testIncorrectMode() throws SQLException {
+    // test that trying to connect in thrift mode results in an exception
+    boolean caughtEx = false;
+    try {
+      test("thrift", "invalidPath");
+    } catch (SQLException e) {
+      caughtEx = true;
+    }
+    assertTrue("exception expected", caughtEx);
+  }
+}