Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java (revision 1199131)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java (working copy)
@@ -18,17 +18,6 @@
 
 package org.apache.hadoop.hive.jdbc;
 
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.service.HiveClient;
-import org.apache.hadoop.hive.service.HiveInterface;
-import org.apache.hadoop.hive.service.HiveServer;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-
 import java.sql.Array;
 import java.sql.Blob;
 import java.sql.CallableStatement;
@@ -37,6 +26,7 @@
 import java.sql.DatabaseMetaData;
 import java.sql.NClob;
 import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.SQLClientInfoException;
 import java.sql.SQLException;
 import java.sql.SQLWarning;
@@ -47,6 +37,18 @@
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.service.HiveClient;
+import org.apache.hadoop.hive.service.HiveInterface;
+import org.apache.hadoop.hive.service.HiveServer;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
 /**
  * HiveConnection.
  *
@@ -57,21 +59,20 @@
   private boolean isClosed = true;
   private SQLWarning warningChain = null;
 
-  private static final String URI_PREFIX = "jdbc:hive://";
-
   /**
-   * TODO: - parse uri (use java.net.URI?).
+   * For the delegate only
    */
-  public HiveConnection(String uri, Properties info) throws SQLException {
-    if (!uri.startsWith(URI_PREFIX)) {
+  protected HiveConnection() {
+  }
+
+  public HiveConnection(String host, int port, String database, Properties info, String uri)
+      throws SQLException {
+    if (!uri.startsWith(HiveDriver.URL_PREFIX)) {
       throw new SQLException("Invalid URL: " + uri, "08S01");
     }
 
-    // remove prefix
-    uri = uri.substring(URI_PREFIX.length());
-
     // If uri is not specified, use local mode.
-    if (uri.isEmpty()) {
+    if (host == null) {
       try {
         client = new HiveServer.HiveServerHandler();
       } catch (MetaException e) {
@@ -79,16 +80,6 @@
             + e.getMessage(), "08S01",e);
       }
     } else {
-      // parse uri
-      // form: hostname:port/databasename
-      String[] parts = uri.split("/");
-      String[] hostport = parts[0].split(":");
-      int port = 10000;
-      String host = hostport[0];
-      try {
-        port = Integer.parseInt(hostport[1]);
-      } catch (Exception e) {
-      }
       transport = new TSocket(host, port);
       TProtocol protocol = new TBinaryProtocol(transport);
       client = new HiveClient(protocol);
@@ -101,6 +92,7 @@
     }
     isClosed = false;
     configureConnection();
+    configureDatabase(database);
   }
 
   private void configureConnection() throws SQLException {
@@ -110,6 +102,33 @@
     stmt.close();
   }
 
+  private void configureDatabase(String database) throws SQLException {
+    if (database != null && database.length() > 0
+        && !database.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      Statement stmt = createStatement();
+      try {
+        ResultSet resultSet = stmt.executeQuery("show databases");
+        boolean createDb = true;
+        while (resultSet.next()) {
+          String dbname = resultSet.getString(1);
+          if (database.equals(dbname)) {
+            createDb = false;
+            break;
+          }
+        }
+        resultSet.close();
+        if (createDb) {
+          stmt.execute(
+              "create database " + database);
+        }
+        stmt.execute(
+            "use " + database);
+      } finally {
+        stmt.close();
+      }
+    }
+  }
+
   /*
    * (non-Javadoc)
    *
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (revision 1199131)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (working copy)
@@ -27,7 +27,6 @@
 import java.util.Properties;
 import java.util.jar.Attributes;
 import java.util.jar.Manifest;
-import java.util.regex.Pattern;
 
 /**
  * HiveDriver.
@@ -51,7 +50,7 @@
   /**
    * The required prefix for the connection URL.
    */
-  private static final String URL_PREFIX = "jdbc:hive://";
+  public static final String URL_PREFIX = "jdbc:hive://";
 
   /**
    * If host is provided, without a port.
@@ -61,17 +60,17 @@
   /**
    * Property key for the database name.
    */
-  private static final String DBNAME_PROPERTY_KEY = "DBNAME";
+  public static final String DBNAME_PROPERTY_KEY = "DBNAME";
 
   /**
    * Property key for the Hive Server host.
    */
-  private static final String HOST_PROPERTY_KEY = "HOST";
+  public static final String HOST_PROPERTY_KEY = "HOST";
 
   /**
    * Property key for the Hive Server port.
   */
-  private static final String PORT_PROPERTY_KEY = "PORT";
+  public static final String PORT_PROPERTY_KEY = "PORT";
 
   /**
    *
@@ -92,16 +91,20 @@
    * jdbc:hive:// - run in embedded mode jdbc:hive://localhost - connect to
    * localhost default port (10000) jdbc:hive://localhost:5050 - connect to
    * localhost port 5050
-   *
-   * TODO: - write a better regex. - decide on uri format
    */
-
   public boolean acceptsURL(String url) throws SQLException {
-    return Pattern.matches(URL_PREFIX + ".*", url);
+    return parseURL(url, null) != null;
   }
 
   public Connection connect(String url, Properties info) throws SQLException {
-    return new HiveConnection(url, info);
+    Properties props = parseURL(url, info);
+    if (props == null) {
+      return null;
+    }
+
+    return new HiveConnection(props.getProperty(HOST_PROPERTY_KEY), Integer.parseInt(props
+        .getProperty(PORT_PROPERTY_KEY, DEFAULT_PORT)), props.getProperty(DBNAME_PROPERTY_KEY),
+        info, url);
   }
 
   /**
@@ -209,23 +212,18 @@
 
   /**
    * Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and
-   * parses it. Everything after jdbc:hive// is optional.
+   * parses it. Everything after jdbc:hive:// is optional.
    *
    * @param url
    * @param defaults
    * @return
    * @throws java.sql.SQLException
    */
-  private Properties parseURL(String url, Properties defaults) throws SQLException {
-    Properties urlProps = (defaults != null) ? new Properties(defaults)
-        : new Properties();
+  protected Properties parseURL(String url, Properties defaults) throws SQLException {
+    Properties urlProps = new Properties(defaults);
 
     if (url == null || !url.startsWith(URL_PREFIX)) {
-      throw new SQLException("Invalid connection url: " + url);
-    }
-
-    if (url.length() <= URL_PREFIX.length()) {
-      return urlProps;
+      return null;
     }
 
     // [hostname]:[port]/[db_name]
Index: jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java
===================================================================
--- jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java (revision 1199131)
+++ jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java (working copy)
@@ -21,6 +21,7 @@
 import java.io.PrintWriter;
 import java.sql.Connection;
 import java.sql.SQLException;
+import java.util.Properties;
 
 import javax.sql.DataSource;
 
@@ -29,12 +30,25 @@
  *
  */
 public class HiveDataSource implements DataSource {
+  /** Database Name */
+  protected String databaseName = null;
+
+  /** Host Name */
+  protected String hostName = null;
+
+  /** Port Number */
+  protected int portNumber = -1;
+
+  /** JDBC URL */
+  protected String url = null;
+
+  protected HiveDriver driver;
 
   /**
-   *
+   * Default constructor
    */
   public HiveDataSource() {
-    // TODO Auto-generated constructor stub
+    driver = new HiveDriver();
   }
 
   /*
@@ -42,7 +56,6 @@
    *
    * @see javax.sql.DataSource#getConnection()
    */
-
   public Connection getConnection() throws SQLException {
     return getConnection("", "");
   }
@@ -52,14 +65,21 @@
    *
    * @see javax.sql.DataSource#getConnection(java.lang.String, java.lang.String)
    */
-
   public Connection getConnection(String username, String password) throws SQLException {
-    try {
-      return new HiveConnection("", null);
-    } catch (Exception ex) {
-      throw new SQLException("Error in getting HiveConnection",ex);
+    Properties props = new Properties();
+
+    if (hostName != null) {
+      props.setProperty(HiveDriver.HOST_PROPERTY_KEY, hostName);
     }
+    if (portNumber != -1) {
+      props.setProperty(HiveDriver.PORT_PROPERTY_KEY, Integer.toString(portNumber));
+    }
+    if (databaseName != null) {
+      props.setProperty(HiveDriver.DBNAME_PROPERTY_KEY, databaseName);
+    }
+
+    return driver.connect(url, props);
   }
 
   /*
@@ -128,4 +148,36 @@
     throw new SQLException("Method not supported");
   }
 
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public String getHostName() {
+    return hostName;
+  }
+
+  public void setHostName(String hostName) {
+    this.hostName = hostName;
+  }
+
+  public int getPortNumber() {
+    return portNumber;
+  }
+
+  public void setPortNumber(int portNumber) {
+    this.portNumber = portNumber;
+  }
+
+  public String getUrl() {
+    return url;
+  }
+
+  public void setUrl(String url) {
+    this.url = url;
+  }
+
 }
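
Usage sketch (illustrative only, not part of the patch): a minimal client for the
patched driver and data source. The class names, the jdbc:hive://[hostname]:[port]/[db_name]
URL form, and the HiveDataSource property setters come from the code above; the host
"example-host", the explicit port 10000, and the database "testdb" are placeholder values.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    import org.apache.hadoop.hive.jdbc.HiveDataSource;

    public class HiveJdbcUsageSketch {
      public static void main(String[] args) throws Exception {
        // Load the driver class, then connect with the URL form handled by HiveDriver.parseURL.
        // A non-default database in the URL is created if missing and selected by
        // HiveConnection.configureDatabase().
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection conn =
            DriverManager.getConnection("jdbc:hive://example-host:10000/testdb", "", "");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("show tables");
        while (rs.next()) {
          System.out.println(rs.getString(1));
        }
        rs.close();
        stmt.close();
        conn.close();

        // The same connection through the new HiveDataSource bean properties; the setters
        // are handed to HiveDriver.connect() as HOST/PORT/DBNAME defaults for the URL.
        HiveDataSource ds = new HiveDataSource();
        ds.setUrl("jdbc:hive://");      // prefix-only URL; details come from the properties below
        ds.setHostName("example-host"); // placeholder host
        ds.setPortNumber(10000);
        ds.setDatabaseName("testdb");   // placeholder database
        Connection conn2 = ds.getConnection();
        conn2.close();
      }
    }

The DataSource path needs setUrl() with at least the jdbc:hive:// prefix, since
HiveDriver.connect() returns null when parseURL() rejects the URL; anything not encoded
in the URL falls back to the HOST/PORT/DBNAME properties, so both paths go through the
same parseURL/connect logic.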