diff --git itests/hive-minikdc/pom.xml itests/hive-minikdc/pom.xml
new file mode 100644
index 0000000..0bb78a3
--- /dev/null
+++ itests/hive-minikdc/pom.xml
@@ -0,0 +1,274 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>0.14.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-minikdc</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - Unit Tests with miniKdc</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-beeline</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-unit</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${mockito-all.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-common</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-common</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop1-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-hadoop1-compat</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+          <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop1.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-tools</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop-2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minikdc</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hbase</groupId>
+          <artifactId>hbase-server</artifactId>
+          <version>${hbase.hadoop2.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <inherited>true</inherited>
+        <extensions>true</extensions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
new file mode 100644
index 0000000..f7ec93d
--- /dev/null
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.GroupMappingServiceProvider;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.shims.ShimLoader;
+
+import com.google.common.io.Files;
+
+public class MiniHiveKdc {
+ public static String HIVE_SERVICE_PRINCIPAL = "hive";
+ public static String HIVE_TEST_USER_1 = "user1";
+ public static String HIVE_TEST_USER_2 = "user2";
+ public static String HIVE_TEST_SUPER_USER = "superuser";
+
+ private MiniKdc miniKdc;
+ private File workDir;
+ private Configuration conf;
+  private Map<String, String> userPrincipals =
+      new HashMap<String, String>();
+ private Properties kdcConf = MiniKdc.createConf();
+ private int keyTabCounter = 1;
+
+ // hadoop group mapping that maps user to same group
+ public static class HiveTestSimpleGroupMapping implements GroupMappingServiceProvider {
+ @Override
+    public List<String> getGroups(String user) throws IOException {
+      List<String> results = new ArrayList<String>();
+ results.add(user);
+ return results;
+ }
+
+ @Override
+ public void cacheGroupsRefresh() throws IOException {
+ }
+
+ @Override
+    public void cacheGroupsAdd(List<String> groups) throws IOException {
+ }
+ }
+
+ public static MiniHiveKdc getMiniHiveKdc (Configuration conf) throws Exception {
+ return new MiniHiveKdc(conf);
+ }
+
+ public MiniHiveKdc(Configuration conf)
+ throws Exception {
+ File baseDir = Files.createTempDir();
+ baseDir.deleteOnExit();
+ workDir = new File (baseDir, "HiveMiniKdc");
+ this.conf = conf;
+
+ /**
+ * Hadoop security classes read the default realm via static initialization,
+ * before miniKdc is initialized. Hence we set the realm via a test configuration
+ * and propagate that to miniKdc.
+ */
+ assertNotNull("java.security.krb5.conf is needed for hadoop security",
+ System.getProperty("java.security.krb5.conf"));
+ System.clearProperty("java.security.krb5.conf");
+
+ miniKdc = new MiniKdc(kdcConf, new File(workDir, "miniKdc"));
+ miniKdc.start();
+
+ // create default users
+ addUserPrincipal(getServicePrincipalForUser(HIVE_SERVICE_PRINCIPAL));
+ addUserPrincipal(HIVE_TEST_USER_1);
+ addUserPrincipal(HIVE_TEST_USER_2);
+ addUserPrincipal(HIVE_TEST_SUPER_USER);
+ }
+
+ public String getKeyTabFile(String principalName) {
+ return userPrincipals.get(principalName);
+ }
+
+ public void shutDown() {
+ miniKdc.stop();
+ }
+
+ public void addUserPrincipal(String principal) throws Exception {
+ File keytab = new File(workDir, "miniKdc" + keyTabCounter++ + ".keytab");
+ miniKdc.createPrincipal(keytab, principal);
+ userPrincipals.put(principal, keytab.getPath());
+ }
+
+ public UserGroupInformation loginUser(String principal)
+ throws Exception {
+ ShimLoader.getHadoopShims().loginUserFromKeytab(principal,
+ getKeyTabFile(principal));
+ return ShimLoader.getHadoopShims().getUGIForConf(conf);
+ }
+
+ public Properties getKdcConf() {
+ return kdcConf;
+ }
+
+ public String getFullyQualifiedUserPrincipal(String shortUserName) {
+ return shortUserName + "@" + miniKdc.getRealm();
+ }
+
+ public String getFullyQualifiedServicePrincipal(String shortUserName) {
+ return getServicePrincipalForUser(shortUserName) + "@" + miniKdc.getRealm();
+ }
+
+ public String getServicePrincipalForUser(String shortUserName) {
+ return shortUserName + "/" + miniKdc.getHost();
+ }
+
+ public String getHiveServicePrincipal() {
+ return getServicePrincipalForUser(HIVE_SERVICE_PRINCIPAL);
+ }
+
+ public String getFullHiveServicePrincipal() {
+ return getServicePrincipalForUser(HIVE_SERVICE_PRINCIPAL) + "@" + miniKdc.getRealm();
+ }
+
+ public String getDefaultUserPrincipal() {
+ return HIVE_TEST_USER_1;
+ }
+
+
+}
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
new file mode 100644
index 0000000..62bfa1e
--- /dev/null
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.*;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.session.HiveSessionHook;
+import org.apache.hive.service.cli.session.HiveSessionHookContext;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestJdbcWithMiniKdc {
+  // Need to set hive.server2.session.hook to SessionHookTest in hive-site
+ public static final String SESSION_USER_NAME = "proxy.test.session.user";
+
+ // set current user in session conf
+ public static class SessionHookTest implements HiveSessionHook {
+ @Override
+ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
+ sessionHookContext.getSessionConf().set(SESSION_USER_NAME,
+ sessionHookContext.getSessionUser());
+ }
+ }
+
+ private static MiniHS2 miniHS2 = null;
+ private static MiniHiveKdc miniHiveKdc = null;
+  private static Map<String, String> confOverlay = new HashMap<String, String>();
+ private Connection hs2Conn;
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ Class.forName(MiniHS2.getJdbcDriverName());
+ confOverlay.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
+ SessionHookTest.class.getName());
+ HiveConf hiveConf = new HiveConf();
+ miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+ String hivePrincipal =
+ miniHiveKdc.getFullyQualifiedServicePrincipal(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+ String hiveKeytab = miniHiveKdc.getKeyTabFile(
+ miniHiveKdc.getServicePrincipalForUser(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL));
+
+ miniHS2 = new MiniHS2.Builder().withConf(new HiveConf()).
+ withMiniKdc(hivePrincipal, hiveKeytab).build();
+ miniHS2.start(confOverlay);
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (hs2Conn != null) {
+ try {
+ hs2Conn.close();
+ } catch (Exception e) {
+ // Ignore shutdown errors since there are negative tests
+ }
+ }
+ }
+
+ @AfterClass
+ public static void afterTest() throws Exception {
+ miniHS2.stop();
+ }
+
+ /***
+ * Basic connection test
+ * @throws Exception
+ */
+ @Test
+ public void testConnection() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
+ verifyProperty(SESSION_USER_NAME, miniHiveKdc.
+ getFullyQualifiedUserPrincipal(MiniHiveKdc.HIVE_TEST_USER_1));
+ }
+
+ /***
+ * Negative test, verify that connection to secure HS2 fails when
+ * required connection attributes are not provided
+ * @throws Exception
+ */
+ @Test
+ public void testConnectionNeg() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
+ try {
+ String url = miniHS2.getJdbcURL().replaceAll(";principal.*", "");
+ hs2Conn = DriverManager.getConnection(url);
+ fail("NON kerberos connection should fail");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+ }
+
+ /***
+   * Test token based authentication over kerberos.
+   * Login as super user and retrieve the token for a normal user,
+   * then use the token to connect as the normal user.
+ * @throws Exception
+ */
+ @Test
+ public void testTokenAuth() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_SUPER_USER);
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
+
+ // retrieve token and store in the cache
+ String token = ((HiveConnection)hs2Conn).getDelegationToken(
+ MiniHiveKdc.HIVE_TEST_USER_1, MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+ assertTrue(token != null && !token.isEmpty());
+ hs2Conn.close();
+
+ UserGroupInformation ugi = miniHiveKdc.
+ loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
+ // Store token in the cache
+ storeToken(token, ugi);
+ hs2Conn = DriverManager.getConnection(miniHS2.getBaseJdbcURL() +
+ "default;auth=delegationToken");
+ verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_1);
+ }
+
+ /***
+   * Negative test for token based authentication.
+   * Verify that a user can't retrieve a token for a user that
+   * it's not allowed to impersonate.
+ * @throws Exception
+ */
+ @Test
+ public void testNegativeTokenAuth() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_SUPER_USER);
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
+
+ // retrieve token and store in the cache
+ String token = ((HiveConnection)hs2Conn).getDelegationToken(
+ MiniHiveKdc.HIVE_TEST_USER_2, MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
+ hs2Conn.close();
+
+ assertNull(MiniHiveKdc.HIVE_TEST_SUPER_USER + " shouldn't be allowed to create token for " +
+ MiniHiveKdc.HIVE_TEST_USER_2, token);
+ }
+
+ /**
+ * Test connection using the proxy user connection property
+ * @throws Exception
+ */
+ @Test
+ public void testProxyAuth() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_SUPER_USER);
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default",
+ ";hive.server2.proxy.user=" + MiniHiveKdc.HIVE_TEST_USER_1));
+ verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_1);
+ }
+
+ /**
+ * Test connection using the proxy user connection property.
+ * Verify proxy connection fails when super user doesn't have privilege to
+ * impersonate the given user
+ * @throws Exception
+ */
+ @Test
+ public void testNegativeProxyAuth() throws Exception {
+ miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_SUPER_USER);
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL("default",
+ ";hive.server2.proxy.user=" + MiniHiveKdc.HIVE_TEST_USER_2));
+ verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_2);
+ fail(MiniHiveKdc.HIVE_TEST_SUPER_USER + " shouldn't be allowed proxy connection for "
+ + MiniHiveKdc.HIVE_TEST_USER_2);
+ } catch (SQLException e) {
+ // Expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+ }
+
+ /**
+ * Verify the config property value
+ * @param propertyName
+ * @param expectedValue
+ * @throws Exception
+ */
+ private void verifyProperty(String propertyName, String expectedValue) throws Exception {
+    Statement stmt = hs2Conn.createStatement();
+ ResultSet res = stmt.executeQuery("set " + propertyName);
+ assertTrue(res.next());
+ String results[] = res.getString(1).split("=");
+ assertEquals("Property should be set", results.length, 2);
+ assertEquals("Property should be set", expectedValue, results[1]);
+ }
+
+ // Store the given token in the UGI
+ private void storeToken(String tokenStr, UserGroupInformation ugi)
+ throws Exception {
+ ShimLoader.getHadoopShims().setTokenStr(ugi,
+ tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
+ }
+
+}
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java
new file mode 100644
index 0000000..9d69952
--- /dev/null
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.io.Files;
+
+public class TestMiniHiveKdc {
+
+ private static File baseDir;
+ private MiniHiveKdc miniHiveKdc;
+ private HiveConf hiveConf;
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ baseDir = Files.createTempDir();
+ baseDir.deleteOnExit();
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ hiveConf = new HiveConf();
+ miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ miniHiveKdc.shutDown();
+ }
+
+ @Test
+ public void testLogin() throws Exception {
+ String servicePrinc = miniHiveKdc.getHiveServicePrincipal();
+ assertNotNull(servicePrinc);
+ miniHiveKdc.loginUser(servicePrinc);
+ assertTrue(ShimLoader.getHadoopShims().isLoginKeytabBased());
+ UserGroupInformation ugi =
+ ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+ assertEquals(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL, ugi.getShortUserName());
+ }
+
+ @AfterClass
+ public static void afterTest() throws Exception {
+
+ }
+
+}
diff --git itests/hive-minikdc/src/test/resources/core-site.xml itests/hive-minikdc/src/test/resources/core-site.xml
new file mode 100644
index 0000000..cf2eee4
--- /dev/null
+++ itests/hive-minikdc/src/test/resources/core-site.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<configuration>
+
+  <!-- proxy user settings for oozie -->
+  <property>
+    <name>hadoop.proxyuser.oozie.hosts</name>
+    <value>*</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.oozie.groups</name>
+    <value>*</value>
+  </property>
+
+  <!-- proxy user settings for httpfs -->
+  <property>
+    <name>hadoop.proxyuser.httpfs.hosts</name>
+    <value>*</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.httpfs.groups</name>
+    <value>*</value>
+  </property>
+
+  <!-- proxy user settings for hive -->
+  <property>
+    <name>hadoop.proxyuser.hive.hosts</name>
+    <value>*</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.hive.groups</name>
+    <value>*</value>
+  </property>
+
+  <!-- proxy user settings for superuser used by the tests -->
+  <property>
+    <name>hadoop.proxyuser.superuser.hosts</name>
+    <value>*</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.superuser.groups</name>
+    <value>user1</value>
+  </property>
+
+  <property>
+    <name>hadoop.security.authentication</name>
+    <value>kerberos</value>
+  </property>
+
+  <property>
+    <name>hadoop.security.authorization</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hadoop.security.group.mapping</name>
+    <value>org.apache.hive.minikdc.MiniHiveKdc$HiveTestSimpleGroupMapping</value>
+  </property>
+
+</configuration>
diff --git itests/hive-unit/pom.xml itests/hive-unit/pom.xml
index 6aff277..0eb784b 100644
--- itests/hive-unit/pom.xml
+++ itests/hive-unit/pom.xml
@@ -32,60 +32,57 @@
-    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
-    <!-- test intra-project -->
+    <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-beeline</artifactId>
+      <artifactId>hive-jdbc</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-cli</artifactId>
+      <artifactId>hive-service</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-it-util</artifactId>
+      <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
+
+    <!-- test intra-project -->
+
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-jdbc</artifactId>
+      <artifactId>hive-beeline</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
+      <artifactId>hive-cli</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
+      <artifactId>hive-it-util</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
-      <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-serde</artifactId>
+      <artifactId>hive-metastore</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
+      <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
+      <artifactId>hive-serde</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
@@ -93,20 +90,12 @@
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-service</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
       <classifier>tests</classifier>
     </dependency>
@@ -132,7 +121,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-core</artifactId>
       <version>${hadoop-20S.version}</version>
-      <scope>test</scope>
     </dependency>
@@ -212,7 +200,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop-23.version}</version>
-      <scope>test</scope>
     </dependency>
@@ -280,5 +267,4 @@
       </plugin>
     </plugins>
   </build>
-
 </project>
diff --git itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
new file mode 100644
index 0000000..095b989
--- /dev/null
+++ itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.miniHS2;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+
+/***
+ * Base class for Hive service
+ * AbstractHiveService.
+ *
+ */
+public abstract class AbstractHiveService {
+ private HiveConf hiveConf = null;
+ private String hostname;
+ private int binaryPort;
+ private int httpPort;
+ private boolean startedHiveService = false;
+  private List<String> addedProperties = new ArrayList<String>();
+
+ public AbstractHiveService(HiveConf hiveConf, String hostname, int binaryPort, int httpPort) {
+ this.hiveConf = hiveConf;
+ this.hostname = hostname;
+ this.binaryPort = binaryPort;
+ this.httpPort = httpPort;
+ }
+
+ /**
+ * Get Hive conf
+ * @return
+ */
+ public HiveConf getHiveConf() {
+ return hiveConf;
+ }
+
+ /**
+ * Get config property
+ * @param propertyKey
+ * @return
+ */
+ public String getConfProperty(String propertyKey) {
+ return hiveConf.get(propertyKey);
+ }
+
+ /**
+ * Set config property
+ * @param propertyKey
+ * @param propertyValue
+ */
+ public void setConfProperty(String propertyKey, String propertyValue) {
+ System.setProperty(propertyKey, propertyValue);
+ hiveConf.set(propertyKey, propertyValue);
+ addedProperties.add(propertyKey);
+ }
+
+ /**
+   * Clear system properties set by this server instance. This ensures that
+   * the changes made by the current test are not impacting subsequent tests.
+ */
+ public void clearProperties() {
+ for (String propKey : addedProperties ) {
+ System.clearProperty(propKey);
+ }
+ }
+
+ /**
+ * Retrieve warehouse directory
+ * @return
+ */
+ public Path getWareHouseDir() {
+ return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE));
+ }
+
+ public void setWareHouseDir(String wareHouseURI) {
+ verifyNotStarted();
+ System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI);
+ hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI);
+ }
+
+ /**
+ * Set service host
+ * @param hostName
+ */
+ public void setHost(String hostName) {
+ this.hostname = hostName;
+ }
+
+ // get service host
+ protected String getHost() {
+ return hostname;
+ }
+
+ /**
+ * Set binary service port #
+ * @param portNum
+ */
+ public void setBinaryPort(int portNum) {
+ this.binaryPort = portNum;
+ }
+
+ /**
+ * Set http service port #
+ * @param portNum
+ */
+ public void setHttpPort(int portNum) {
+ this.httpPort = portNum;
+ }
+
+ // Get binary service port #
+ protected int getBinaryPort() {
+ return binaryPort;
+ }
+
+ // Get http service port #
+ protected int getHttpPort() {
+ return httpPort;
+ }
+
+ public boolean isStarted() {
+ return startedHiveService;
+ }
+
+ protected void setStarted(boolean hiveServiceStatus) {
+ this.startedHiveService = hiveServiceStatus;
+ }
+
+ protected void verifyStarted() {
+ if (!isStarted()) {
+ throw new IllegalStateException("HiveServer2 is not running");
+ }
+ }
+
+ protected void verifyNotStarted() {
+ if (isStarted()) {
+ throw new IllegalStateException("HiveServer2 already running");
+ }
+ }
+
+}
diff --git itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
new file mode 100644
index 0000000..733f60b
--- /dev/null
+++ itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -0,0 +1,314 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.miniHS2;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.service.Service;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
+import org.apache.hive.service.server.HiveServer2;
+
+import com.google.common.io.Files;
+
+public class MiniHS2 extends AbstractHiveService {
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+ private HiveServer2 hiveServer2 = null;
+ private final File baseDir;
+ private final Path baseDfsDir;
+ private static final AtomicLong hs2Counter = new AtomicLong();
+ private static final String HS2_BINARY_MODE = "binary";
+ private static final String HS2_HTTP_MODE = "http";
+ private MiniMrShim mr;
+ private MiniDFSShim dfs;
+ private boolean useMiniMR = false;
+ private boolean useMiniKdc = false;
+ private String serverPrincipal;
+ private String serverKeytab;
+
+ public static class Builder {
+ private HiveConf hiveConf = new HiveConf();
+ private boolean useMiniMR = false;
+ private boolean useMiniKdc = false;
+ private String serverPrincipal;
+ private String serverKeytab;
+
+ public Builder() {
+ }
+
+ public Builder withMiniMR() {
+ this.useMiniMR = true;
+ return this;
+ }
+
+ public Builder withMiniKdc(String serverPrincipal, String serverKeytab) {
+ this.useMiniKdc = true;
+ this.serverPrincipal = serverPrincipal;
+ this.serverKeytab = serverKeytab;
+ return this;
+ }
+
+ public Builder withConf(HiveConf hiveConf) {
+ this.hiveConf = hiveConf;
+ return this;
+ }
+
+ public MiniHS2 build() throws Exception {
+ if (useMiniMR && useMiniKdc) {
+ throw new IOException("Can't create secure miniMr ... yet");
+ }
+ return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, serverKeytab);
+ }
+ }
+
+ public MiniMrShim getMr() {
+ return mr;
+ }
+
+ public void setMr(MiniMrShim mr) {
+ this.mr = mr;
+ }
+
+ public MiniDFSShim getDfs() {
+ return dfs;
+ }
+
+ public void setDfs(MiniDFSShim dfs) {
+ this.dfs = dfs;
+ }
+
+ public boolean isUseMiniMR() {
+ return useMiniMR;
+ }
+
+ public void setUseMiniMR(boolean useMiniMR) {
+ this.useMiniMR = useMiniMR;
+ }
+
+ public boolean isUseMiniKdc() {
+ return useMiniKdc;
+ }
+
+ private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc, String serverPrincipal, String serverKeytab) throws Exception {
+ super(hiveConf, "localhost", MetaStoreUtils.findFreePort(), MetaStoreUtils.findFreePort());
+ this.useMiniMR = useMiniMR;
+ this.useMiniKdc = useMiniKdc;
+ this.serverPrincipal = serverPrincipal;
+ this.serverKeytab = serverKeytab;
+ baseDir = Files.createTempDir();
+ FileSystem fs;
+ if (useMiniMR) {
+ dfs = ShimLoader.getHadoopShims().getMiniDfs(hiveConf, 4, true, null);
+ fs = dfs.getFileSystem();
+ mr = ShimLoader.getHadoopShims().getMiniMrCluster(hiveConf, 4,
+ fs.getUri().toString(), 1);
+ // store the config in system properties
+ mr.setupConfiguration(getHiveConf());
+ baseDfsDir = new Path(new Path(fs.getUri()), "/base");
+ } else {
+ fs = FileSystem.getLocal(hiveConf);
+ baseDfsDir = new Path("file://"+ baseDir.getPath());
+ }
+ if (useMiniKdc) {
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, serverPrincipal);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, serverKeytab);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, "KERBEROS");
+ }
+ String metaStoreURL = "jdbc:derby:" + baseDir.getAbsolutePath() + File.separator + "test_metastore-" +
+ hs2Counter.incrementAndGet() + ";create=true";
+
+ fs.mkdirs(baseDfsDir);
+ Path wareHouseDir = new Path(baseDfsDir, "warehouse");
+ fs.mkdirs(wareHouseDir);
+ setWareHouseDir(wareHouseDir.toString());
+ System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+ hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
+ // reassign a new port, just in case if one of the MR services grabbed the last one
+ setBinaryPort(MetaStoreUtils.findFreePort());
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort());
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
+ HiveMetaStore.HMSHandler.resetDefaultDBFlag();
+
+ Path scratchDir = new Path(baseDfsDir, "scratch");
+ fs.mkdirs(scratchDir);
+ System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.toString());
+ System.setProperty(HiveConf.ConfVars.LOCALSCRATCHDIR.varname,
+ baseDir.getPath() + File.separator + "scratch");
+ }
+
+ public MiniHS2(HiveConf hiveConf) throws Exception {
+ this(hiveConf, false);
+ }
+
+ public MiniHS2(HiveConf hiveConf, boolean useMiniMR) throws Exception {
+ this(hiveConf, useMiniMR, false, null, null);
+ }
+
+  public void start(Map<String, String> confOverlay) throws Exception {
+ hiveServer2 = new HiveServer2();
+ // Set confOverlay parameters
+    for (Map.Entry<String, String> entry : confOverlay.entrySet()) {
+ setConfProperty(entry.getKey(), entry.getValue());
+ }
+ hiveServer2.init(getHiveConf());
+ hiveServer2.start();
+ waitForStartup();
+ setStarted(true);
+ }
+
+ public void stop() {
+ verifyStarted();
+ hiveServer2.stop();
+ setStarted(false);
+ try {
+ if (mr != null) {
+ mr.shutdown();
+ mr = null;
+ }
+ if (dfs != null) {
+ dfs.shutdown();
+ dfs = null;
+ }
+ } catch (IOException e) {
+ // Ignore errors cleaning up miniMR
+ }
+ FileUtils.deleteQuietly(baseDir);
+ }
+
+ public CLIServiceClient getServiceClient() {
+ verifyStarted();
+ return getServiceClientInternal();
+ }
+
+ public CLIServiceClient getServiceClientInternal() {
+ for (Service service : hiveServer2.getServices()) {
+ if (service instanceof ThriftBinaryCLIService) {
+ return new ThriftCLIServiceClient((ThriftBinaryCLIService) service);
+ }
+ if (service instanceof ThriftHttpCLIService) {
+ return new ThriftCLIServiceClient((ThriftHttpCLIService) service);
+ }
+ }
+ throw new IllegalStateException("HiveServer2 not running Thrift service");
+ }
+
+ /**
+ * return connection URL for this server instance
+ * @return
+ */
+ public String getJdbcURL() {
+ return getJdbcURL("default");
+ }
+
+ /**
+ * return connection URL for this server instance
+ * @param dbName - DB name to be included in the URL
+ * @return
+ */
+ public String getJdbcURL(String dbName) {
+ return getJdbcURL(dbName, "");
+ }
+
+ /**
+ * return connection URL for this server instance
+ * @param dbName - DB name to be included in the URL
+   * @param urlExtension - Additional string to be appended to the URL
+ * @return
+ */
+ public String getJdbcURL(String dbName, String urlExtension) {
+ assert urlExtension != null;
+ String krbConfig = "";
+ if (isUseMiniKdc()) {
+ krbConfig = ";principal=" + serverPrincipal;
+ }
+ return getBaseJdbcURL() + dbName + krbConfig + urlExtension;
+ }
+
+ /**
+ * Build base JDBC URL
+ * @return
+ */
+ public String getBaseJdbcURL() {
+ String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+ if(transportMode != null && (transportMode.equalsIgnoreCase(HS2_HTTP_MODE))) {
+ return "jdbc:hive2://" + getHost() + ":" + getHttpPort() + "/";
+ }
+ else {
+ return "jdbc:hive2://" + getHost() + ":" + getBinaryPort() + "/";
+ }
+ }
+
+ public static String getJdbcDriverName() {
+ return driverName;
+ }
+
+ private void waitForStartup() throws Exception {
+ int waitTime = 0;
+ long startupTimeout = 1000L * 1000L;
+ CLIServiceClient hs2Client = getServiceClientInternal();
+ SessionHandle sessionHandle = null;
+ do {
+ Thread.sleep(500L);
+ waitTime += 500L;
+ if (waitTime > startupTimeout) {
+ throw new TimeoutException("Couldn't access new HiveServer2: " + getJdbcURL());
+ }
+ try {
+      Map<String, String> sessionConf = new HashMap<String, String>();
+ /**
+ if (isUseMiniKdc()) {
+ getMiniKdc().loginUser(getMiniKdc().getDefaultUserPrincipal());
+ sessionConf.put("principal", serverPrincipal);
+ }
+ */
+ sessionHandle = hs2Client.openSession("foo", "bar", sessionConf);
+ } catch (Exception e) {
+ // service not started yet
+ continue;
+ }
+ hs2Client.closeSession(sessionHandle);
+ break;
+ } while (true);
+ }
+}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
deleted file mode 100644
index 095b989..0000000
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.jdbc.miniHS2;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-
-/***
- * Base class for Hive service
- * AbstractHiveService.
- *
- */
-public abstract class AbstractHiveService {
- private HiveConf hiveConf = null;
- private String hostname;
- private int binaryPort;
- private int httpPort;
- private boolean startedHiveService = false;
-  private List<String> addedProperties = new ArrayList<String>();
-
- public AbstractHiveService(HiveConf hiveConf, String hostname, int binaryPort, int httpPort) {
- this.hiveConf = hiveConf;
- this.hostname = hostname;
- this.binaryPort = binaryPort;
- this.httpPort = httpPort;
- }
-
- /**
- * Get Hive conf
- * @return
- */
- public HiveConf getHiveConf() {
- return hiveConf;
- }
-
- /**
- * Get config property
- * @param propertyKey
- * @return
- */
- public String getConfProperty(String propertyKey) {
- return hiveConf.get(propertyKey);
- }
-
- /**
- * Set config property
- * @param propertyKey
- * @param propertyValue
- */
- public void setConfProperty(String propertyKey, String propertyValue) {
- System.setProperty(propertyKey, propertyValue);
- hiveConf.set(propertyKey, propertyValue);
- addedProperties.add(propertyKey);
- }
-
- /**
- * Create system properties set by this server instance. This ensures that
- * the changes made by current test are not impacting subsequent tests.
- */
- public void clearProperties() {
- for (String propKey : addedProperties ) {
- System.clearProperty(propKey);
- }
- }
-
- /**
- * Retrieve warehouse directory
- * @return
- */
- public Path getWareHouseDir() {
- return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE));
- }
-
- public void setWareHouseDir(String wareHouseURI) {
- verifyNotStarted();
- System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI);
- hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI);
- }
-
- /**
- * Set service host
- * @param hostName
- */
- public void setHost(String hostName) {
- this.hostname = hostName;
- }
-
- // get service host
- protected String getHost() {
- return hostname;
- }
-
- /**
- * Set binary service port #
- * @param portNum
- */
- public void setBinaryPort(int portNum) {
- this.binaryPort = portNum;
- }
-
- /**
- * Set http service port #
- * @param portNum
- */
- public void setHttpPort(int portNum) {
- this.httpPort = portNum;
- }
-
- // Get binary service port #
- protected int getBinaryPort() {
- return binaryPort;
- }
-
- // Get http service port #
- protected int getHttpPort() {
- return httpPort;
- }
-
- public boolean isStarted() {
- return startedHiveService;
- }
-
- protected void setStarted(boolean hiveServiceStatus) {
- this.startedHiveService = hiveServiceStatus;
- }
-
- protected void verifyStarted() {
- if (!isStarted()) {
- throw new IllegalStateException("HiveServer2 is not running");
- }
- }
-
- protected void verifyNotStarted() {
- if (isStarted()) {
- throw new IllegalStateException("HiveServer2 already running");
- }
- }
-
-}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
deleted file mode 100644
index d76750c..0000000
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.jdbc.miniHS2;
-
-import static org.junit.Assert.assertNotNull;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
-import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hive.service.Service;
-import org.apache.hive.service.cli.CLIServiceClient;
-import org.apache.hive.service.cli.SessionHandle;
-import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
-import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
-import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
-import org.apache.hive.service.server.HiveServer2;
-
-import com.google.common.io.Files;
-
-public class MiniHS2 extends AbstractHiveService {
- private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
- private HiveServer2 hiveServer2 = null;
- private final File baseDir;
- private final Path baseDfsDir;
- private static final AtomicLong hs2Counter = new AtomicLong();
- private static final String HS2_BINARY_MODE = "binary";
- private static final String HS2_HTTP_MODE = "http";
- private MiniMrShim mr;
- private MiniDFSShim dfs;
-
- public MiniHS2(HiveConf hiveConf) throws IOException {
- this(hiveConf, false);
- }
-
- public MiniHS2(HiveConf hiveConf, boolean useMiniMR) throws IOException {
- super(hiveConf, "localhost", MetaStoreUtils.findFreePort(), MetaStoreUtils.findFreePort());
- baseDir = Files.createTempDir();
- FileSystem fs;
- if (useMiniMR) {
- dfs = ShimLoader.getHadoopShims().getMiniDfs(hiveConf, 4, true, null);
- fs = dfs.getFileSystem();
- mr = ShimLoader.getHadoopShims().getMiniMrCluster(hiveConf, 4,
- fs.getUri().toString(), 1);
- // store the config in system properties
- mr.setupConfiguration(getHiveConf());
- baseDfsDir = new Path(new Path(fs.getUri()), "/base");
- } else {
- fs = FileSystem.getLocal(hiveConf);
- baseDfsDir = new Path("file://"+ baseDir.getPath());
- }
- String metaStoreURL = "jdbc:derby:" + baseDir.getAbsolutePath() + File.separator + "test_metastore-" +
- hs2Counter.incrementAndGet() + ";create=true";
-
- fs.mkdirs(baseDfsDir);
- Path wareHouseDir = new Path(baseDfsDir, "warehouse");
- fs.mkdirs(wareHouseDir);
- setWareHouseDir(wareHouseDir.toString());
- System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
- hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
- // reassign a new port, just in case if one of the MR services grabbed the last one
- setBinaryPort(MetaStoreUtils.findFreePort());
- hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
- hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
- hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort());
- hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
- HiveMetaStore.HMSHandler.resetDefaultDBFlag();
-
- Path scratchDir = new Path(baseDfsDir, "scratch");
- fs.mkdirs(scratchDir);
- System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.toString());
- System.setProperty(HiveConf.ConfVars.LOCALSCRATCHDIR.varname,
- baseDir.getPath() + File.separator + "scratch");
- }
-
-  public void start(Map<String, String> confOverlay) throws Exception {
- hiveServer2 = new HiveServer2();
- // Set confOverlay parameters
-    for (Map.Entry<String, String> entry : confOverlay.entrySet()) {
- setConfProperty(entry.getKey(), entry.getValue());
- }
- hiveServer2.init(getHiveConf());
- hiveServer2.start();
- waitForStartup();
- setStarted(true);
- }
-
- public void stop() {
- verifyStarted();
- hiveServer2.stop();
- setStarted(false);
- try {
- if (mr != null) {
- mr.shutdown();
- mr = null;
- }
- if (dfs != null) {
- dfs.shutdown();
- dfs = null;
- }
- } catch (IOException e) {
- // Ignore errors cleaning up miniMR
- }
- FileUtils.deleteQuietly(baseDir);
- }
-
- public CLIServiceClient getServiceClient() {
- verifyStarted();
- return getServiceClientInternal();
- }
-
- public CLIServiceClient getServiceClientInternal() {
- for (Service service : hiveServer2.getServices()) {
- if (service instanceof ThriftBinaryCLIService) {
- return new ThriftCLIServiceClient((ThriftBinaryCLIService) service);
- }
- if (service instanceof ThriftHttpCLIService) {
- return new ThriftCLIServiceClient((ThriftHttpCLIService) service);
- }
- }
- throw new IllegalStateException("HiveServer2 not running Thrift service");
- }
-
- /**
- * return connection URL for this server instance
- * @return
- */
- public String getJdbcURL() {
- return getJdbcURL("default");
- }
-
- /**
- * return connection URL for this server instance
- * @param dbName - DB name to be included in the URL
- * @return
- */
- public String getJdbcURL(String dbName) {
- return getJdbcURL(dbName, "");
- }
-
- /**
- * return connection URL for this server instance
- * @param dbName - DB name to be included in the URL
- * @param urlExtension - Addional string to be appended to URL
- * @return
- */
- public String getJdbcURL(String dbName, String urlExtension) {
- assertNotNull("URL extension shouldn't be null", urlExtension);
- String transportMode = getConfProperty(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
- if(transportMode != null && (transportMode.equalsIgnoreCase(HS2_HTTP_MODE))) {
- return "jdbc:hive2://" + getHost() + ":" + getHttpPort() + "/" + dbName;
- }
- else {
- return "jdbc:hive2://" + getHost() + ":" + getBinaryPort() + "/" + dbName + urlExtension;
- }
- }
-
- public static String getJdbcDriverName() {
- return driverName;
- }
-
- private void waitForStartup() throws Exception {
- int waitTime = 0;
- long startupTimeout = 1000L * 1000000000L;
- CLIServiceClient hs2Client = getServiceClientInternal();
- SessionHandle sessionHandle = null;
- do {
- Thread.sleep(500L);
- waitTime += 500L;
- if (waitTime > startupTimeout) {
- throw new TimeoutException("Couldn't access new HiveServer2: " + getJdbcURL());
- }
- try {
- sessionHandle = hs2Client.openSession("foo", "bar");
- } catch (Exception e) {
- // service not started yet
- continue;
- }
- hs2Client.closeSession(sessionHandle);
- break;
- } while (true);
- }
-
-}
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
index 84f508c..da51a55 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
@@ -42,7 +42,7 @@
   private Map<String, String> confOverlay;
@BeforeClass
- public static void beforeTest() throws IOException {
+ public static void beforeTest() throws Exception {
miniHS2 = new MiniHS2(new HiveConf());
}
diff --git itests/pom.xml itests/pom.xml
index 589ba25..11c6282 100644
--- itests/pom.xml
+++ itests/pom.xml
@@ -40,4 +40,13 @@
     <module>qtest</module>
   </modules>
+
+  <profiles>
+    <profile>
+      <id>hadoop-2</id>
+      <modules>
+        <module>hive-minikdc</module>
+      </modules>
+    </profile>
+  </profiles>
 
 </project>
diff --git pom.xml pom.xml
index 27effb5..0430a77 100644
--- pom.xml
+++ pom.xml
@@ -147,6 +147,7 @@
     <xerces.version>2.9.1</xerces.version>
     <zookeeper.version>3.4.5</zookeeper.version>
     <jpam.version>1.1</jpam.version>
+    <felix.version>2.4.0</felix.version>
@@ -630,6 +631,11 @@
+        <plugin>
+          <groupId>org.apache.felix</groupId>
+          <artifactId>maven-bundle-plugin</artifactId>
+          <version>${felix.version}</version>
+        </plugin>
@@ -752,6 +758,7 @@
             <test.warehouse.dir>${test.warehouse.scheme}${test.warehouse.dir}</test.warehouse.dir>
             <test.src.tables>src,src1,srcbucket,srcbucket2,src_json,src_thrift,src_sequencefile,srcpart,alltypesorc</test.src.tables>
+            <java.security.krb5.conf>${test.tmp.dir}/conf/krb5.conf</java.security.krb5.conf>
@@ -963,6 +970,11 @@
         <version>${hadoop-23.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minikdc</artifactId>
+        <version>${hadoop-23.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-common</artifactId>
         <version>${hbase.hadoop2.version}</version>
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 551a69d..141eb61 100644
--- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -75,8 +75,8 @@ public String getAuthName() {
public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
- public HiveAuthFactory() throws TTransportException {
- conf = new HiveConf();
+ public HiveAuthFactory(HiveConf conf) throws TTransportException {
+ this.conf = conf;
transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
@@ -172,7 +172,11 @@ public String getRemoteUser() {
}
public String getIpAddress() {
- return saslServer != null ? saslServer.getRemoteAddress().toString() : null;
+ if(saslServer != null && saslServer.getRemoteAddress() != null) {
+ return saslServer.getRemoteAddress().toString();
+ } else {
+ return null;
+ }
}
// Perform kerberos login using the hadoop shim API if the configuration is available
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index 6fbc847..b009a88 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -39,7 +39,7 @@ public ThriftBinaryCLIService(CLIService cliService) {
@Override
public void run() {
try {
- hiveAuthFactory = new HiveAuthFactory();
+ hiveAuthFactory = new HiveAuthFactory(hiveConf);
TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
index cb01cfd..f4cbe91 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
@@ -96,7 +96,7 @@ public void run() {
connector.setReuseAddress(!Shell.WINDOWS);
httpServer.addConnector(connector);
- hiveAuthFactory = new HiveAuthFactory();
+ hiveAuthFactory = new HiveAuthFactory(hiveConf);
TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
TProcessor processor = processorFactory.getProcessor(null);
@@ -173,4 +173,4 @@ private static void verifyHttpConfiguration(HiveConf hiveConf) {
}
}
-}
\ No newline at end of file
+}