diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 535ad3d..ed18060 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -135,11 +135,7 @@ public HiveConnection(String uri, Properties info) throws SQLException {
     setupLoginTimeout();
-    try {
-      connParams = Utils.parseURL(uri, info);
-    } catch (ZooKeeperHiveClientException e) {
-      throw new SQLException(e);
-    }
+    connParams = Utils.parseURL(uri, info);
     jdbcUriString = connParams.getJdbcUriString();
     // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
     // each list: <key1>=<val1>;<key2>=<val2> and so on
@@ -174,7 +170,6 @@ public HiveConnection(String uri, Properties info) throws SQLException {
       EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
       embeddedClient.init(null);
       client = embeddedClient;
 
-      // open client session
       openSession();
       executeInitSql();
@@ -185,7 +180,8 @@ public HiveConnection(String uri, Properties info) throws SQLException {
         if (StringUtils.isNotBlank(strRetries)) {
           maxRetries = Integer.parseInt(strRetries);
         }
-      } catch(NumberFormatException e) { // Ignore the exception
+      } catch(NumberFormatException e) {
+        // Ignore the exception
       }
 
       for (int numRetries = 0;;) {
@@ -197,10 +193,9 @@ public HiveConnection(String uri, Properties info) throws SQLException {
           // open client session
           openSession();
           executeInitSql();
-
           break;
         } catch (Exception e) {
-          LOG.warn("Failed to connect to " + connParams.getHost() + ":" + connParams.getPort());
+          LOG.warn("Failed to connect to " + connParams.getHost() + ":" + connParams.getPort(), e);
           String errMsg = null;
           String warnMsg = "Could not open client transport with JDBC Uri: " + jdbcUriString + ": ";
           if (isZkDynamicDiscoveryMode()) {
@@ -451,7 +446,11 @@ public long getRetryInterval() {
         // Pick trust store config from the given path
         sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
         try (FileInputStream fis = new FileInputStream(sslTrustStorePath)) {
-          sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
+          if (sslTrustStorePassword != null) {
+            sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
+          } else {
+            sslTrustStore.load(fis, null);
+          }
         }
         sslContext = SSLContexts.custom().loadTrustMaterial(sslTrustStore, null).build();
         socketFactory =
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
index a349f8b..cf5e5a2 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
@@ -231,17 +231,11 @@ public boolean jdbcCompliant() {
   private Properties parseURLforPropertyInfo(String url, Properties defaults) throws SQLException {
     Properties urlProps = (defaults != null) ?
        new Properties(defaults) : new Properties();
-
     if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
       throw new SQLException("Invalid connection url: " + url);
     }
 
-    JdbcConnectionParams params = null;
-    try {
-      params = Utils.parseURL(url, defaults);
-    } catch (ZooKeeperHiveClientException e) {
-      throw new SQLException(e);
-    }
+    JdbcConnectionParams params = Utils.parseURL(url, defaults);
     String host = params.getHost();
     if (host == null){
       host = "";
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index bfae8b9..2874899 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -268,7 +268,7 @@ static void verifySuccess(TStatus status, boolean withInfo) throws SQLException
   }
 
   public static JdbcConnectionParams parseURL(String uri) throws JdbcUriParseException,
-      SQLException, ZooKeeperHiveClientException {
+      SQLException {
     return parseURL(uri, new Properties());
   }
   /**
@@ -294,12 +294,14 @@ public static JdbcConnectionParams parseURL(String uri) throws JdbcUriParseExcep
    * @return
    * @throws SQLException
    */
-  static JdbcConnectionParams parseURL(String uri, Properties info) throws JdbcUriParseException,
-      SQLException, ZooKeeperHiveClientException {
+  static JdbcConnectionParams parseURL(String uri, Properties connProperties)
+      throws JdbcUriParseException {
+    String suppliedUri = uri;
     JdbcConnectionParams connParams = new JdbcConnectionParams();
-
+    LOG.info("Parsing JDBC uri: " + suppliedUri);
     if (!uri.startsWith(URL_PREFIX)) {
-      throw new JdbcUriParseException("Bad URL format: Missing prefix " + URL_PREFIX);
+      throw new JdbcUriParseException("Bad URL format: Missing prefix " + URL_PREFIX
+          + ". Supplied JDBC Uri: " + suppliedUri);
     }
 
     // For URLs with no other configuration
@@ -322,7 +324,6 @@ static JdbcConnectionParams parseURL(String uri, Properties info) throws JdbcUri
       // jdbc:hive2:///dbName;sess_var_list?hive_conf_list#hive_var_list
       connParams.setEmbeddedMode(true);
     } else {
-      LOG.info("Supplied authorities: " + suppliedAuthorities);
       String[] authorityList = suppliedAuthorities.split(",");
       connParams.setSuppliedAuthorityList(authorityList);
       uri = uri.replace(suppliedAuthorities, dummyAuthorityString);
@@ -346,16 +347,9 @@ static JdbcConnectionParams parseURL(String uri, Properties info) throws JdbcUri
     } else {
       // we have dbname followed by session parameters
       dbName = sessVars.substring(0, sessVars.indexOf(';'));
+      // parse session vars
       sessVars = sessVars.substring(sessVars.indexOf(';') + 1);
-      if (sessVars != null) {
-        Matcher sessMatcher = pattern.matcher(sessVars);
-        while (sessMatcher.find()) {
-          if (connParams.getSessionVars().put(sessMatcher.group(1), sessMatcher.group(2)) != null) {
-            throw new JdbcUriParseException("Bad URL format: Multiple values for property "
-                + sessMatcher.group(1));
-          }
-        }
-      }
+      setParams(suppliedUri, pattern, sessVars, connParams.getSessionVars());
     }
     if (!dbName.isEmpty()) {
       connParams.setDbName(dbName);
@@ -364,53 +358,79 @@ static JdbcConnectionParams parseURL(String uri, Properties info) throws JdbcUri
 
     // parse hive conf settings
     String confStr = jdbcURI.getQuery();
-    if (confStr != null) {
-      Matcher confMatcher = pattern.matcher(confStr);
-      while (confMatcher.find()) {
-        connParams.getHiveConfs().put(confMatcher.group(1), confMatcher.group(2));
-      }
-    }
+    setParams(suppliedUri, pattern, confStr, connParams.getHiveConfs());
 
     // parse hive var settings
     String varStr = jdbcURI.getFragment();
-    if (varStr != null) {
-      Matcher varMatcher = pattern.matcher(varStr);
-      while (varMatcher.find()) {
-        connParams.getHiveVars().put(varMatcher.group(1), varMatcher.group(2));
+    setParams(suppliedUri, pattern, varStr, connParams.getHiveVars());
+
+    // Override connection params with those supplied in connection properties
+    applyConnectionProps(connParams, connProperties);
+
+    handleDeprecations(connParams);
+
+    // Extract host, port
+    if (connParams.isEmbeddedMode()) {
+      // In case of embedded mode we were supplied with an empty authority.
+      // So we never substituted the authority with a dummy one.
+      connParams.setHost(jdbcURI.getHost());
+      connParams.setPort(jdbcURI.getPort());
+    } else {
+      // Configure host, port and params from ZooKeeper if used,
+      // and substitute the dummy authority with a resolved one
+      try {
+        configureConnParams(connParams);
+      } catch (JdbcUriParseException | ZooKeeperHiveClientException e) {
+        throw new JdbcUriParseException("Error parsing JDBC Uri: " + suppliedUri, e);
       }
+      // We check for invalid host, port while configuring connParams with configureConnParams()
+      String authorityStr = connParams.getHost() + ":" + connParams.getPort();
+      LOG.info("Resolved authority: " + authorityStr);
+      uri = uri.replace(dummyAuthorityString, authorityStr);
+      connParams.setJdbcUriString(uri);
     }
-
-    // Apply configs supplied in the JDBC connection properties object
-    for (Map.Entry<Object, Object> kv : info.entrySet()) {
+
+    return connParams;
+  }
+
+  // Apply configs supplied in the JDBC connection properties object
+  private static void applyConnectionProps(JdbcConnectionParams connParams,
+      Properties connProperties) {
+    for (Map.Entry<Object, Object> kv : connProperties.entrySet()) {
       if ((kv.getKey() instanceof String)) {
         String key = (String) kv.getKey();
         if (key.startsWith(JdbcConnectionParams.HIVE_VAR_PREFIX)) {
           connParams.getHiveVars().put(
-              key.substring(JdbcConnectionParams.HIVE_VAR_PREFIX.length()), info.getProperty(key));
+              key.substring(JdbcConnectionParams.HIVE_VAR_PREFIX.length()),
+              connProperties.getProperty(key));
         } else if (key.startsWith(JdbcConnectionParams.HIVE_CONF_PREFIX)) {
           connParams.getHiveConfs().put(
-              key.substring(JdbcConnectionParams.HIVE_CONF_PREFIX.length()), info.getProperty(key));
+              key.substring(JdbcConnectionParams.HIVE_CONF_PREFIX.length()),
+              connProperties.getProperty(key));
         }
       }
     }
+
     // Extract user/password from JDBC connection properties if its not supplied
     // in the connection URL
     if (!connParams.getSessionVars().containsKey(JdbcConnectionParams.AUTH_USER)) {
-      if (info.containsKey(JdbcConnectionParams.AUTH_USER)) {
-        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_USER,
-            info.getProperty(JdbcConnectionParams.AUTH_USER));
-      }
-      if (info.containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
-        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD,
-            info.getProperty(JdbcConnectionParams.AUTH_PASSWD));
-      }
+      if (connProperties.containsKey(JdbcConnectionParams.AUTH_USER)) {
+        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_USER,
+            connProperties.getProperty(JdbcConnectionParams.AUTH_USER));
+      }
+      if (connProperties.containsKey(JdbcConnectionParams.AUTH_PASSWD)) {
+        connParams.getSessionVars().put(JdbcConnectionParams.AUTH_PASSWD,
+            connProperties.getProperty(JdbcConnectionParams.AUTH_PASSWD));
+      }
     }
 
-    if (info.containsKey(JdbcConnectionParams.AUTH_TYPE)) {
+    if (connProperties.containsKey(JdbcConnectionParams.AUTH_TYPE)) {
       connParams.getSessionVars().put(JdbcConnectionParams.AUTH_TYPE,
-          info.getProperty(JdbcConnectionParams.AUTH_TYPE));
+          connProperties.getProperty(JdbcConnectionParams.AUTH_TYPE));
     }
+  }
 
+  private static void handleDeprecations(JdbcConnectionParams connParams) {
     // Handle all deprecations here:
     String newUsage;
     String usageUrlBase = "jdbc:hive2://<host>:<port>/dbName;";
@@ -429,23 +449,21 @@ static JdbcConnectionParams parseURL(String uri, Properties info) throws JdbcUri
     newUsage = usageUrlBase + JdbcConnectionParams.HTTP_PATH + "=<http_path_value>";
     handleParamDeprecation(connParams.getHiveConfs(), connParams.getSessionVars(),
         JdbcConnectionParams.HTTP_PATH_DEPRECATED, JdbcConnectionParams.HTTP_PATH, newUsage);
-    // Extract host, port
-    if (connParams.isEmbeddedMode()) {
-      // In case of embedded mode we were supplied with an empty authority.
-      // So we never substituted the authority with a dummy one.
-      connParams.setHost(jdbcURI.getHost());
-      connParams.setPort(jdbcURI.getPort());
-    } else {
-      // Configure host, port and params from ZooKeeper if used,
-      // and substitute the dummy authority with a resolved one
-      configureConnParams(connParams);
-      // We check for invalid host, port while configuring connParams with configureConnParams()
-      String authorityStr = connParams.getHost() + ":" + connParams.getPort();
-      LOG.info("Resolved authority: " + authorityStr);
-      uri = uri.replace(dummyAuthorityString, authorityStr);
-      connParams.setJdbcUriString(uri);
+  }
+
+  private static void setParams(String suppliedUri, Pattern pattern, String paramStr,
+      Map<String, String> paramGroup) throws JdbcUriParseException {
+    if (paramStr != null) {
+      Matcher confMatcher = pattern.matcher(paramStr);
+      while (confMatcher.find()) {
+        // If a property is specified multiple times in the URI, we can't decide which
+        // key-value pair to pick. Throw an error.
+        if (paramGroup.put(confMatcher.group(1), confMatcher.group(2)) != null) {
+          throw new JdbcUriParseException("Bad URL format: Multiple values for property: "
+              + confMatcher.group(1) + ". Supplied JDBC Uri: " + suppliedUri);
+        }
+      }
     }
-    return connParams;
   }
 
   /**
diff --git a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
index 8d6003a..206834b 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
@@ -70,6 +70,7 @@ static void configureConnParams(JdbcConnectionParams connParams)
       }
       // Now pick a server node randomly
       serverNode = serverHosts.get(randomizer.nextInt(serverHosts.size()));
+      LOG.info("Picked server: " + serverNode + " from ZooKeeper. Trying to configure it now.");
      connParams.setCurrentHostZnodePath(serverNode);
      // Read data from the znode for this server node
      // This data could be either config string (new releases) or server end
@@ -93,7 +94,7 @@ static void configureConnParams(JdbcConnectionParams connParams)
         applyConfs(dataStr, connParams);
       }
     } catch (Exception e) {
-      throw new ZooKeeperHiveClientException("Unable to read HiveServer2 configs from ZooKeeper", e);
+      throw new ZooKeeperHiveClientException(e);
     } finally {
       // Close the client connection with ZooKeeper
       if (zooKeeperClient != null) {
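A minimal usage sketch of the behavior this patch introduces (not part of the patch itself; the host, port, and property names below are placeholders): with the new setParams helper, a JDBC URI that repeats a property now fails fast at parse time, and because HiveConnection no longer wraps parseURL in a try/catch, the parse error surfaces directly as a SQLException from DriverManager.getConnection.

    // Hypothetical illustration only; assumes the patched hive-jdbc driver is on the classpath.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class DuplicateParamExample {
      public static void main(String[] args) {
        // "user" appears twice in the session variable list of the URI.
        String badUri = "jdbc:hive2://localhost:10000/default;user=foo;user=bar";
        try (Connection ignored = DriverManager.getConnection(badUri)) {
          // Unreachable: URL parsing fails before any connection attempt is made.
        } catch (SQLException e) {
          // Expected message, per the patch:
          // "Bad URL format: Multiple values for property: user. Supplied JDBC Uri: ..."
          System.out.println(e.getMessage());
        }
      }
    }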