diff --git itests/hive-minikdc/pom.xml itests/hive-minikdc/pom.xml index 0bb78a3a0a6cd158f9ed0bda62a6222bc10190f6..e66dd2a7ceee7e3f80f01f9cb0d0b927ec37815c 100644 --- itests/hive-minikdc/pom.xml +++ itests/hive-minikdc/pom.xml @@ -60,6 +60,13 @@ <groupId>org.apache.hive</groupId> + <artifactId>hive-it-unit</artifactId> + <version>${project.version}</version> + <scope>test</scope> + <classifier>tests</classifier> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> <artifactId>hive-jdbc</artifactId> <version>${project.version}</version> <scope>test</scope> diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java index e11466dc19a14fdf5f795ea6b77ab74c94d13ede..e089aa95f31c6ca090d29f22a4ed6675b45b6ec0 100644 --- itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java +++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java @@ -28,73 +28,32 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; -import org.apache.hadoop.hive.ql.hooks.HookContext; -import org.apache.hadoop.hive.ql.hooks.HookContext.HookType; +import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook; +import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook; +import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Tests information retrieved from hooks, in Kerberos mode. 
*/ public class TestHs2HooksWithMiniKdc { - private static final Logger LOG = LoggerFactory.getLogger(TestHs2HooksWithMiniKdc.class); - - public static class PostExecHook implements ExecuteWithHookContext { - private static String userName; - private static String ipAddress; - private static String operation; - private static Throwable error; - - public void run(HookContext hookContext) { - try { - if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) { - ipAddress = hookContext.getIpAddress(); - userName = hookContext.getUserName(); - operation = hookContext.getOperationName(); - } - } catch (Throwable t) { - LOG.error("Error in PostExecHook: " + t, t); - error = t; - } - } - } - - public static class PreExecHook implements ExecuteWithHookContext { - private static String userName; - private static String ipAddress; - private static String operation; - private static Throwable error; - - public void run(HookContext hookContext) { - try { - if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) { - ipAddress = hookContext.getIpAddress(); - userName = hookContext.getUserName(); - operation = hookContext.getOperationName(); - } - } catch (Throwable t) { - LOG.error("Error in PreExecHook: " + t, t); - error = t; - } - } - } private static MiniHS2 miniHS2 = null; private static MiniHiveKdc miniHiveKdc = null; private static Map<String, String> confOverlay = new HashMap<String, String>(); private Connection hs2Conn; @BeforeClass - public static void beforeTest() throws Exception { + public static void setUpBeforeClass() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); confOverlay.put(ConfVars.POSTEXECHOOKS.varname, PostExecHook.class.getName()); confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName()); + confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, + SemanticAnalysisHook.class.getName()); HiveConf hiveConf = new HiveConf(); miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf); @@ -102,12 +61,30 @@ public static void beforeTest() throws 
Exception { miniHS2.start(confOverlay); } + @AfterClass + public static void tearDownAfterClass() throws Exception { + miniHS2.stop(); + } + @Before - public void setUp() throws Exception { + public void setUpTest() throws Exception { + PreExecHook.userName = null; + PreExecHook.ipAddress = null; + PreExecHook.operation = null; + PreExecHook.error = null; + PostExecHook.userName = null; + PostExecHook.ipAddress = null; + PostExecHook.operation = null; + PostExecHook.error = null; + SemanticAnalysisHook.userName = null; + SemanticAnalysisHook.ipAddress = null; + SemanticAnalysisHook.command = null; + SemanticAnalysisHook.preAnalyzeError = null; + SemanticAnalysisHook.postAnalyzeError = null; } @After - public void tearDown() throws Exception { + public void tearDownTest() throws Exception { if (hs2Conn != null) { try { hs2Conn.close(); @@ -117,16 +94,11 @@ public void tearDown() throws Exception { } } - @AfterClass - public static void afterTest() throws Exception { - miniHS2.stop(); - } - /** - * Test get IpAddress and username from hook. + * Test that hook context properties are correctly set. 
*/ @Test - public void testIpUserName() throws Throwable { + public void testHookContexts() throws Throwable { miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1); hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL()); @@ -155,5 +127,24 @@ public void testIpUserName() throws Throwable { Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName); Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1")); Assert.assertEquals("SHOWTABLES", PreExecHook.operation); + + error = SemanticAnalysisHook.preAnalyzeError; + if (error != null) { + throw error; + } + error = SemanticAnalysisHook.postAnalyzeError; + if (error != null) { + throw error; + } + + Assert.assertNotNull("semantic hook context ipaddress is null", + SemanticAnalysisHook.ipAddress); + Assert.assertNotNull("semantic hook context userName is null", + SemanticAnalysisHook.userName); + Assert.assertNotNull("semantic hook context command is null", + SemanticAnalysisHook.command); + Assert.assertTrue(SemanticAnalysisHook.ipAddress, + SemanticAnalysisHook.ipAddress.contains("127.0.0.1")); + Assert.assertEquals("show tables", SemanticAnalysisHook.command); } -} +} diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java index 06767582f0130a245b09a0c78f8e5f5076e393ca..56960763e6beb4783d2b5af72034e2c12a5f5743 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java @@ -19,8 +19,11 @@ //The tests here are heavily based on some timing, so there is some chance to fail. 
package org.apache.hadoop.hive.hooks; -import java.util.Properties; +import java.io.Serializable; +import java.lang.Override; import java.sql.Statement; +import java.util.List; +import java.util.Properties; import junit.framework.Assert; @@ -28,9 +31,15 @@ import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; import org.apache.hadoop.hive.ql.hooks.HookContext; import org.apache.hadoop.hive.ql.hooks.HookContext.HookType; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook; +import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hive.jdbc.HiveConnection; import org.apache.hive.service.server.HiveServer2; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; @@ -44,10 +53,10 @@ private static HiveServer2 hiveServer2; public static class PostExecHook implements ExecuteWithHookContext { - private static String userName; - private static String ipAddress; - private static String operation; - private static Throwable error; + public static String userName; + public static String ipAddress; + public static String operation; + public static Throwable error; public void run(HookContext hookContext) { try { @@ -64,10 +73,10 @@ public void run(HookContext hookContext) { } public static class PreExecHook implements ExecuteWithHookContext { - private static String userName; - private static String ipAddress; - private static String operation; - private static Throwable error; + public static String userName; + public static String ipAddress; + public static String operation; + public static Throwable error; public void run(HookContext hookContext) { try { @@ -83,6 +92,41 @@ public void run(HookContext hookContext) { } } + public static class SemanticAnalysisHook implements 
HiveSemanticAnalyzerHook { + public static String userName; + public static String command; + public static String ipAddress; + public static Throwable preAnalyzeError; + public static Throwable postAnalyzeError; + + @Override + public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, + ASTNode ast) throws SemanticException { + try { + userName = context.getUserName(); + ipAddress = context.getIpAddress(); + command = context.getCommand(); + } catch (Throwable t) { + LOG.error("Error in semantic analysis hook preAnalyze: " + t, t); + preAnalyzeError = t; + } + return ast; + } + + @Override + public void postAnalyze(HiveSemanticAnalyzerHookContext context, + List<Task<? extends Serializable>> rootTasks) throws SemanticException { + try { + userName = context.getUserName(); + ipAddress = context.getIpAddress(); + command = context.getCommand(); + } catch (Throwable t) { + LOG.error("Error in semantic analysis hook postAnalyze: " + t, t); + postAnalyzeError = t; + } + } + } + + /** * @throws java.lang.Exception */ @@ -93,6 +137,8 @@ public static void setUpBeforeClass() throws Exception { PreExecHook.class.getName()); hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, PostExecHook.class.getName()); + hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, + SemanticAnalysisHook.class.getName()); hiveServer2 = new HiveServer2(); hiveServer2.init(hiveConf); @@ -107,16 +153,32 @@ public static void tearDownAfterClass() throws Exception { } } + @Before + public void setUpTest() throws Exception { + PreExecHook.userName = null; + PreExecHook.ipAddress = null; + PreExecHook.operation = null; + PreExecHook.error = null; + PostExecHook.userName = null; + PostExecHook.ipAddress = null; + PostExecHook.operation = null; + PostExecHook.error = null; + SemanticAnalysisHook.userName = null; + SemanticAnalysisHook.ipAddress = null; + SemanticAnalysisHook.command = null; + SemanticAnalysisHook.preAnalyzeError = null; + SemanticAnalysisHook.postAnalyzeError = null; + } + /** - * Test get IpAddress and 
username from hook. + * Test that hook context properties are correctly set. */ @Test - public void testIpUserName() throws Throwable { + public void testHookContexts() throws Throwable { Properties connProp = new Properties(); connProp.setProperty("user", System.getProperty("user.name")); connProp.setProperty("password", ""); HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp); - Statement stmt = connection.createStatement(); stmt.executeQuery("show databases"); stmt.executeQuery("show tables"); @@ -142,6 +204,24 @@ public void testIpUserName() throws Throwable { Assert.assertNotNull(PreExecHook.operation , "operation is null"); Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1")); Assert.assertEquals("SHOWTABLES", PreExecHook.operation); + + error = SemanticAnalysisHook.preAnalyzeError; + if (error != null) { + throw error; + } + error = SemanticAnalysisHook.postAnalyzeError; + if (error != null) { + throw error; + } + + Assert.assertNotNull("semantic hook context ipaddress is null", + SemanticAnalysisHook.ipAddress); + Assert.assertNotNull("semantic hook context userName is null", + SemanticAnalysisHook.userName); + Assert.assertNotNull("semantic hook context command is null", + SemanticAnalysisHook.command); + Assert.assertTrue(SemanticAnalysisHook.ipAddress, + SemanticAnalysisHook.ipAddress.contains("127.0.0.1")); + Assert.assertEquals("show tables", SemanticAnalysisHook.command); } } - diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index a105eca338122a0df09d9b126ebc5dc760721d9e..e25450531a71ef4ae4c6d9ea1788e618189a17cb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -409,6 +409,8 @@ public int compile(String command, boolean resetTaskIds) { HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl(); hookCtx.setConf(conf); 
hookCtx.setUserName(userName); + hookCtx.setIpAddress(SessionState.get().getUserIpAddress()); + hookCtx.setCommand(command); for (HiveSemanticAnalyzerHook hook : saHooks) { tree = hook.preAnalyze(hookCtx, tree); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java index 8694d07ac6c85f2ce2a82de32abb23d4147c9e5a..b78ce909a2fe6d34b69bb422527e4e520cbd7e4b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java @@ -57,4 +57,12 @@ public String getUserName(); public void setUserName(String userName); + + public String getIpAddress(); + + public void setIpAddress(String ipAddress); + + public String getCommand(); + + public void setCommand(String command); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java index 4f6dad4909be9a29ef5a92addda9e674a28606e9..4ce705b2b2a0ac41359c28350da412129f9ada58 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java @@ -33,6 +33,8 @@ Set<ReadEntity> inputs = null; Set<WriteEntity> outputs = null; private String userName; + private String ipAddress; + private String command; @Override public Hive getHive() throws HiveException { @@ -73,4 +75,24 @@ public String getUserName() { public void setUserName(String userName) { this.userName = userName; } + + @Override + public String getIpAddress() { + return ipAddress; + } + + @Override + public void setIpAddress(String ipAddress) { + this.ipAddress = ipAddress; + } + + @Override + public String getCommand() { + return command; + } + + @Override + public void setCommand(String command) { + this.command = command; + } }