diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java index e403d28af4..5df5edebf9 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestTriggersMoveWorkloadManager.java @@ -86,8 +86,8 @@ public static void beforeTest() throws Exception { @Test(timeout = 60000) public void testTriggerMoveAndKill() throws Exception { - Expression moveExpression = ExpressionFactory.fromString("EXECUTION_TIME > 1sec"); - Expression killExpression = ExpressionFactory.fromString("EXECUTION_TIME > 5000ms"); + Expression moveExpression = ExpressionFactory.fromString("EXECUTION_TIME > '1sec'"); + Expression killExpression = ExpressionFactory.fromString("EXECUTION_TIME > '5000ms'"); Trigger moveTrigger = new ExecutionTrigger("slow_query_move", moveExpression, new Action(Action.Type.MOVE_TO_POOL, "ETL")); Trigger killTrigger = new ExecutionTrigger("slow_query_kill", killExpression, diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 37e98456f1..2dea254c87 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -1097,7 +1097,7 @@ private String buildTriggerExpression(ASTNode ast) throws SemanticException { } StringBuilder builder = new StringBuilder(); for (int i = 0; i < ast.getChildCount(); ++i) { - builder.append(ast.getChild(i).getText()); + builder.append(stripQuotes(ast.getChild(i).getText())); builder.append(' '); } builder.deleteCharAt(builder.length() - 1); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g index 09a4368984..d4b5782430 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g @@ -472,21 +472,6 @@ ByteLengthLiteral (Digit)+ ('b' | 'B' | 'k' | 'K' | 'm' | 'M' | 'g' | 'G') ; -TimeFullLiteral - : - (Digit)+ ('NS' | 'NSEC' | 'NSECS' | 'NANOSECOND' | 'NANOSECONDS' | - 'US' | 'USEC' | 'USECS' | 'MICROSECOND' | 'MICROSECONDS' | - 'MS' | 'MSEC' | 'MSECS' | 'MILLISECOND' | 'MILLISECONDS' | - 'SEC' | 'SECS' | 'SECOND' | 'SECONDS' | - 'MIN' | 'MINS' | 'MINUTE' | 'MINUTES' | - 'HOUR' | 'HOURS' | 'DAY' | 'DAYS') - ; - -ByteLengthFullLiteral - : - (Digit)+ ('KB' | 'MB' | 'GB' | 'TB' | 'PB') - ; - Number : (Digit)+ ( DOT (Digit)* (Exponent)? | Exponent)? diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g index 21f2d45dc1..f8c47f972f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g @@ -179,12 +179,12 @@ triggerAtomExpression : identifier comparisionOperator triggerLiteral ; + triggerLiteral @init { gParent.pushMsg("triggerLiteral", state); } @after { gParent.popMsg(state); } : Number - | TimeFullLiteral - | ByteLengthFullLiteral + | StringLiteral ; comparisionOperator diff --git ql/src/java/org/apache/hadoop/hive/ql/wm/ExpressionFactory.java ql/src/java/org/apache/hadoop/hive/ql/wm/ExpressionFactory.java index 2299a1b315..aca2c04409 100644 --- ql/src/java/org/apache/hadoop/hive/ql/wm/ExpressionFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/wm/ExpressionFactory.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.ParseDriver; import org.apache.hadoop.hive.ql.parse.ParseException; +import org.apache.hadoop.hive.ql.plan.PlanUtils; /** * Factory to create expressions @@ -60,7 +61,8 @@ public static Expression fromString(final String expression) { } final String counterName = 
node.getChild(0).getText(); - final String counterValueStr = node.getChild(2).getText().toLowerCase(); + final String counterValueStr = PlanUtils.stripQuotes( + node.getChild(2).getText().toLowerCase()); if (counterName.isEmpty()) { throw new IllegalArgumentException("Counter name cannot be empty!"); } diff --git ql/src/test/org/apache/hadoop/hive/ql/wm/TestExpressionFactory.java ql/src/test/org/apache/hadoop/hive/ql/wm/TestExpressionFactory.java index 074794c357..71e92c7be7 100644 --- ql/src/test/org/apache/hadoop/hive/ql/wm/TestExpressionFactory.java +++ ql/src/test/org/apache/hadoop/hive/ql/wm/TestExpressionFactory.java @@ -35,31 +35,31 @@ public void testSize() { assertEquals("BYTES_READ", expr.getCounterLimit().getName()); assertEquals(5, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("BYTES_READ > 5kb"); + expr = ExpressionFactory.fromString("BYTES_READ > '5kb'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("BYTES_READ", expr.getCounterLimit().getName()); assertEquals(5 * (1 << 10), expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("BYTES_READ > 2mb"); + expr = ExpressionFactory.fromString("BYTES_READ > '2mb'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("BYTES_READ", expr.getCounterLimit().getName()); assertEquals(2 * (1 << 20), expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("BYTES_READ > 3gb"); + expr = ExpressionFactory.fromString("BYTES_READ > '3gb'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("BYTES_READ", expr.getCounterLimit().getName()); assertEquals(3L * (1 << 30), expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("SHUFFLE_BYTES > 7tb"); + expr = ExpressionFactory.fromString("SHUFFLE_BYTES > '7tb'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); 
assertEquals("SHUFFLE_BYTES", expr.getCounterLimit().getName()); assertEquals(7L * (1L << 40), expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("SHUFFLE_BYTES > 6pb"); + expr = ExpressionFactory.fromString("SHUFFLE_BYTES > '6pb'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("SHUFFLE_BYTES", expr.getCounterLimit().getName()); @@ -82,31 +82,31 @@ public void testTime() { assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); assertEquals(1, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("ELAPSED_TIME > 1ms"); + expr = ExpressionFactory.fromString("ELAPSED_TIME > '1ms'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); assertEquals(1, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("ELAPSED_TIME > 1sec"); + expr = ExpressionFactory.fromString("ELAPSED_TIME > '1sec'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); assertEquals(1000, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("ELAPSED_TIME > 1min"); + expr = ExpressionFactory.fromString("ELAPSED_TIME > '1min'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); assertEquals(60 * 1000, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("ELAPSED_TIME > 1hour"); + expr = ExpressionFactory.fromString("ELAPSED_TIME > '1hour'"); assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); assertEquals(3600 * 1000, expr.getCounterLimit().getLimit()); - expr = ExpressionFactory.fromString("ELAPSED_TIME > 1day"); + expr = ExpressionFactory.fromString("ELAPSED_TIME > '1day'"); 
assertNotNull(expr); assertEquals(Predicate.GREATER_THAN, expr.getPredicate()); assertEquals("ELAPSED_TIME", expr.getCounterLimit().getName()); diff --git ql/src/test/org/apache/hadoop/hive/ql/wm/TestTrigger.java ql/src/test/org/apache/hadoop/hive/ql/wm/TestTrigger.java index 3953d4c7f4..b86c58a859 100644 --- ql/src/test/org/apache/hadoop/hive/ql/wm/TestTrigger.java +++ ql/src/test/org/apache/hadoop/hive/ql/wm/TestTrigger.java @@ -193,19 +193,19 @@ public void testExpressionFromString() { @Test public void testSizeValidationInTrigger() { - Expression expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > 100MB"); + Expression expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > '100MB'"); Expression expected = ExpressionFactory.createExpression(new FileSystemCounterLimit("", FileSystemCounterLimit.FSCounter.SHUFFLE_BYTES, 100 * 1024 * 1024)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > 1gB"); + expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > '1gB'"); expected = ExpressionFactory.createExpression(new FileSystemCounterLimit("", FileSystemCounterLimit.FSCounter.SHUFFLE_BYTES, 1024 * 1024 * 1024)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > 1TB"); + expression = ExpressionFactory.fromString(" SHUFFLE_BYTES > '1TB'"); expected = ExpressionFactory.createExpression(new FileSystemCounterLimit("", FileSystemCounterLimit.FSCounter.SHUFFLE_BYTES, 1024L * 1024 * 1024 * 1024)); assertEquals(expected, expression); @@ -227,69 +227,67 @@ public void testSizeValidationInTrigger() { @Test public void testIllegalSizeCounterValue1() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: SHUFFLE_BYTES > 300GiB"); - ExpressionFactory.fromString(" SHUFFLE_BYTES > 300GiB"); + ExpressionFactory.fromString(" 
SHUFFLE_BYTES > '300GiB'"); } @Test public void testIllegalSizeCounterValue2() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: SHUFFLE_BYTES > 300 foo"); - ExpressionFactory.fromString(" SHUFFLE_BYTES > 300 foo"); + ExpressionFactory.fromString(" SHUFFLE_BYTES > '300 foo'"); } @Test public void testTimeValidationInTrigger() { - Expression expression = ExpressionFactory.fromString(" elapsed_TIME > 300sec"); + Expression expression = ExpressionFactory.fromString(" elapsed_TIME > '300sec'"); Expression expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300seconds"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300seconds'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300sec"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300sec'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300second"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300second'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300seconds"); + expression = ExpressionFactory.fromString(" elapsed_TIME > 
'300seconds'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300sec"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300sec'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300000ms"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300000ms'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 300000000microseconds"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '300000000microseconds'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 300000)); assertEquals(expected, expression); assertEquals(expected.hashCode(), expression.hashCode()); - expression = ExpressionFactory.fromString(" elapsed_TIME > 1DAY"); + expression = ExpressionFactory.fromString(" elapsed_TIME > '1DAY'"); expected = ExpressionFactory.createExpression(new TimeCounterLimit(TimeCounterLimit.TimeCounter .ELAPSED_TIME, 24 * 60 * 60 * 1000)); assertEquals(expected, expression); @@ -299,15 +297,13 @@ public void testTimeValidationInTrigger() { @Test public void testIllegalTimeCounterValue1() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: elapsed_TIME > 300lightyears"); - ExpressionFactory.fromString(" elapsed_TIME > 300lightyears"); + ExpressionFactory.fromString(" 
elapsed_TIME > '300lightyears'"); } @Test public void testIllegalTimeCounterValue2() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: elapsed_TIME > 300secTOR"); - ExpressionFactory.fromString(" elapsed_TIME > 300secTOR"); + ExpressionFactory.fromString(" elapsed_TIME > '300secTOR'"); } @Test @@ -383,23 +379,26 @@ public void testIllegalExpressionsMultipleCounters() { } @Test + public void testIllegalExpressionsNoQuotes() { + thrown.expect(IllegalArgumentException.class); + ExpressionFactory.fromString("BYTES_READ > 1mb"); + } + + @Test public void testIllegalExpressionsInvalidLimitPost() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: BYTES_READ > 1024aaaa"); - ExpressionFactory.fromString("BYTES_READ > 1024aaaa"); + ExpressionFactory.fromString("BYTES_READ > '1024aaaa'"); } @Test public void testIllegalExpressionsInvalidLimitPre() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: BYTES_READ > foo1024"); ExpressionFactory.fromString("BYTES_READ > foo1024"); } @Test public void testIllegalExpressionsInvalidNegativeLimit() { thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Invalid expression: BYTES_READ > -1024"); ExpressionFactory.fromString("BYTES_READ > -1024"); } } diff --git ql/src/test/queries/clientpositive/authorization_wm.q ql/src/test/queries/clientpositive/authorization_wm.q index 0a2b50431f..95019f1987 100644 --- ql/src/test/queries/clientpositive/authorization_wm.q +++ ql/src/test/queries/clientpositive/authorization_wm.q @@ -20,14 +20,14 @@ explain authorization show resource plan rp; explain authorization alter resource plan rp set query_parallelism = 5; explain authorization drop resource plan rp; explain authorization create pool rp.pool0 WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'; -explain authorization create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL; 
+explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL; explain authorization create user mapping 'joe' IN rp UNMANAGED; show resource plans; show resource plan rp; alter resource plan rp set query_parallelism = 5; drop resource plan rp; create pool rp.pool0 WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'; -create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL; +create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL; create user mapping 'joe' IN rp UNMANAGED; set user.name=hive_admin_user; @@ -37,7 +37,7 @@ explain authorization show resource plan rp; explain authorization alter resource plan rp set query_parallelism = 5; explain authorization drop resource plan rp; explain authorization create pool rp.pool0 WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'; -explain authorization create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL; +explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL; explain authorization create user mapping 'joe' IN rp UNMANAGED; show resource plans; show resource plan rp; @@ -45,18 +45,18 @@ alter resource plan rp set query_parallelism = 5; drop resource plan rp; create resource plan rp; create pool rp.pool0 WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'; -create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL; +create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL; create user mapping 'joe' IN rp UNMANAGED; set user.name=ruser1; explain authorization alter pool rp.pool0 SET QUERY_PARALLELISM=4; -explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL; +explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL; explain authorization alter user mapping 'joe' IN rp TO pool0; explain authorization drop user mapping 'joe' IN rp; explain authorization drop pool rp.pool0; explain authorization drop trigger rp.trigger0; alter pool rp.pool0 SET 
QUERY_PARALLELISM=4; -alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL; +alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL; alter user mapping 'joe' IN rp TO pool0; drop user mapping 'joe' IN rp; drop pool rp.pool0; @@ -65,13 +65,13 @@ drop trigger rp.trigger0; set user.name=hive_admin_user; set role ADMIN; explain authorization alter pool rp.pool0 SET QUERY_PARALLELISM=4; -explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL; +explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL; explain authorization alter user mapping 'joe' IN rp TO pool0; explain authorization drop user mapping 'joe' IN rp; explain authorization drop pool rp.pool0; explain authorization drop trigger rp.trigger0; alter pool rp.pool0 SET QUERY_PARALLELISM=4; -alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL; +alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL; alter user mapping 'joe' IN rp TO pool0; drop user mapping 'joe' IN rp; drop pool rp.pool0; diff --git ql/src/test/queries/clientpositive/resourceplan.q ql/src/test/queries/clientpositive/resourceplan.q index 4cbdede342..0c44da3c20 100644 --- ql/src/test/queries/clientpositive/resourceplan.q +++ ql/src/test/queries/clientpositive/resourceplan.q @@ -148,40 +148,46 @@ SELECT * FROM SYS.WM_RESOURCEPLANS; -- Create trigger commands. -- +-- Test that WM literals do not cause conflicts. +create table wm_test(key string); +select key as 30min from wm_test; +select "10kb" as str from wm_test; +drop table wm_test; + CREATE RESOURCE PLAN plan_1; -CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > 10kb DO KILL; +CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '10kb' DO KILL; SELECT * FROM SYS.WM_TRIGGERS; -- Duplicate should fail. CREATE TRIGGER plan_1.trigger_1 WHEN ELAPSED_TIME > 300 DO KILL; -- Invalid triggers should fail. 
-CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > 30sec AND BYTES_READ > 10 DO MOVE TO slow_pool; -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > 30second OR BYTES_READ > 10 DO MOVE TO slow_pool; -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME >= 30seconds DO MOVE TO slow_pool; -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME < 30hour DO MOVE TO slow_pool; -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME <= 30min DO MOVE TO slow_pool; -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME = 0day DO MOVE TO slow_pool; - -CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > 30hour DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30sec' AND BYTES_READ > 10 DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30second' OR BYTES_READ > 10 DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME >= '30seconds' DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME < '30hour' DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME <= '30min' DO MOVE TO slow_pool; +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME = '0day' DO MOVE TO slow_pool; + +CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30hour' DO MOVE TO slow_pool; SELECT * FROM SYS.WM_TRIGGERS; -ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > 1min DO KILL; +ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '1min' DO KILL; SELECT * FROM SYS.WM_TRIGGERS; DROP TRIGGER plan_1.trigger_1; SELECT * FROM SYS.WM_TRIGGERS; -- No edit on active resource plan. -CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > 100mb DO MOVE TO null_pool; +CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > '100mb' DO MOVE TO null_pool; -- Add trigger with reserved keywords. 
-CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100KB DO MOVE TO `table`; -CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100MB DO MOVE TO `default`; -CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1GB DO MOVE TO `default`; +CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > '100KB' DO MOVE TO `table`; +CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > '100MB' DO MOVE TO `default`; +CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > "1GB" DO MOVE TO `default`; CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL; -CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > 1hour DO KILL; +CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > '1hour' DO KILL; SELECT * FROM SYS.WM_TRIGGERS; DROP TRIGGER `table`.`database`; SELECT * FROM SYS.WM_TRIGGERS; @@ -190,13 +196,13 @@ SELECT * FROM SYS.WM_TRIGGERS; ALTER RESOURCE PLAN plan_1 ENABLE; SELECT * FROM SYS.WM_RESOURCEPLANS; DROP TRIGGER plan_1.trigger_2; -ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > 1000gb DO KILL; +ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000gb" DO KILL; -- Cannot drop/change trigger from active plan. ALTER RESOURCE PLAN plan_1 ACTIVATE; SELECT * FROM SYS.WM_RESOURCEPLANS; DROP TRIGGER plan_1.trigger_2; -ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > 1000KB DO KILL; +ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000KB" DO KILL; -- Once disabled we should be able to change it. 
ALTER RESOURCE PLAN plan_2 DISABLE; @@ -383,8 +389,8 @@ SELECT * FROM SYS.WM_MAPPINGS; CREATE RESOURCE PLAN plan_4a LIKE plan_4; CREATE POOL plan_4a.pool1 WITH SCHEDULING_POLICY='fair', QUERY_PARALLELISM=2, ALLOC_FRACTION=0.0; CREATE USER MAPPING "user1" IN plan_4a TO pool1; -CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > 10GB DO KILL; -CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > 11GB DO KILL; +CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > '10GB' DO KILL; +CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > '11GB' DO KILL; ALTER POOL plan_4a.pool1 ADD TRIGGER trigger_2; CREATE RESOURCE PLAN plan_4b LIKE plan_4a; diff --git ql/src/test/results/clientpositive/authorization_wm.q.out ql/src/test/results/clientpositive/authorization_wm.q.out index 8a2a6d62d8..405a4a4b91 100644 --- ql/src/test/results/clientpositive/authorization_wm.q.out +++ ql/src/test/results/clientpositive/authorization_wm.q.out @@ -97,9 +97,9 @@ OPERATION: CREATE_POOL AUTHORIZATION_FAILURES: Permission denied: Principal [name=ruser1, type=USER] does not have following privileges for operation CREATE_POOL [ADMIN PRIVILEGE on INPUT, ADMIN PRIVILEGE on OUTPUT] -PREHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL +PREHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL PREHOOK: type: CREATE TRIGGER -POSTHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL +POSTHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL POSTHOOK: type: CREATE TRIGGER INPUTS: OUTPUTS: @@ -189,9 +189,9 @@ CURRENT_USER: hive_admin_user OPERATION: CREATE_POOL -PREHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL +PREHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL PREHOOK: type: CREATE TRIGGER -POSTHOOK: query: explain authorization create trigger rp.trigger0 WHEN 
BYTES_READ > 10GB DO KILL +POSTHOOK: query: explain authorization create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL POSTHOOK: type: CREATE TRIGGER INPUTS: OUTPUTS: @@ -245,10 +245,10 @@ PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest POSTHOOK: query: create pool rp.pool0 WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default' POSTHOOK: type: CREATE POOL -PREHOOK: query: create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL +PREHOOK: query: create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: create trigger rp.trigger0 WHEN BYTES_READ > 10GB DO KILL +POSTHOOK: query: create trigger rp.trigger0 WHEN BYTES_READ > '10GB' DO KILL POSTHOOK: type: CREATE TRIGGER PREHOOK: query: create user mapping 'joe' IN rp UNMANAGED PREHOOK: type: CREATE MAPPING @@ -268,9 +268,9 @@ OPERATION: ALTER_POOL AUTHORIZATION_FAILURES: Permission denied: Principal [name=ruser1, type=USER] does not have following privileges for operation ALTER_POOL [ADMIN PRIVILEGE on INPUT, ADMIN PRIVILEGE on OUTPUT] -PREHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +PREHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL PREHOOK: type: ALTER TRIGGER -POSTHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +POSTHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL POSTHOOK: type: ALTER TRIGGER INPUTS: OUTPUTS: @@ -354,9 +354,9 @@ CURRENT_USER: hive_admin_user OPERATION: ALTER_POOL -PREHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +PREHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL PREHOOK: type: ALTER TRIGGER -POSTHOOK: query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +POSTHOOK: 
query: explain authorization alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL POSTHOOK: type: ALTER TRIGGER INPUTS: OUTPUTS: @@ -414,10 +414,10 @@ PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest POSTHOOK: query: alter pool rp.pool0 SET QUERY_PARALLELISM=4 POSTHOOK: type: ALTER POOL -PREHOOK: query: alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +PREHOOK: query: alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: alter trigger rp.trigger0 WHEN BYTES_READ > 15GB DO KILL +POSTHOOK: query: alter trigger rp.trigger0 WHEN BYTES_READ > '15GB' DO KILL POSTHOOK: type: ALTER TRIGGER PREHOOK: query: alter user mapping 'joe' IN rp TO pool0 PREHOOK: type: ALTER MAPPING diff --git ql/src/test/results/clientpositive/llap/resourceplan.q.out ql/src/test/results/clientpositive/llap/resourceplan.q.out index c7e9638ea1..68cbf09bf7 100644 --- ql/src/test/results/clientpositive/llap/resourceplan.q.out +++ ql/src/test/results/clientpositive/llap/resourceplan.q.out @@ -3622,15 +3622,47 @@ POSTHOOK: Input: sys@wm_resourceplans #### A masked pattern was here #### plan_2 ACTIVE 10 default table DISABLED 1 default +PREHOOK: query: create table wm_test(key string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: INFORMATION_SCHEMA@wm_test +PREHOOK: Output: database:information_schema +POSTHOOK: query: create table wm_test(key string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: INFORMATION_SCHEMA@wm_test +POSTHOOK: Output: database:information_schema +PREHOOK: query: select key as 30min from wm_test +PREHOOK: type: QUERY +PREHOOK: Input: information_schema@wm_test +#### A masked pattern was here #### +POSTHOOK: query: select key as 30min from wm_test +POSTHOOK: type: QUERY +POSTHOOK: Input: information_schema@wm_test +#### A masked pattern was here #### +PREHOOK: query: select "10kb" as str from wm_test +PREHOOK: type: QUERY +PREHOOK: Input: information_schema@wm_test 
+#### A masked pattern was here #### +POSTHOOK: query: select "10kb" as str from wm_test +POSTHOOK: type: QUERY +POSTHOOK: Input: information_schema@wm_test +#### A masked pattern was here #### +PREHOOK: query: drop table wm_test +PREHOOK: type: DROPTABLE +PREHOOK: Input: information_schema@wm_test +PREHOOK: Output: information_schema@wm_test +POSTHOOK: query: drop table wm_test +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: information_schema@wm_test +POSTHOOK: Output: information_schema@wm_test PREHOOK: query: CREATE RESOURCE PLAN plan_1 PREHOOK: type: CREATE RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest POSTHOOK: query: CREATE RESOURCE PLAN plan_1 POSTHOOK: type: CREATE RESOURCEPLAN -PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > 10kb DO KILL +PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '10kb' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > 10kb DO KILL +POSTHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '10kb' DO KILL POSTHOOK: type: CREATE TRIGGER PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS PREHOOK: type: QUERY @@ -3645,16 +3677,16 @@ PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN ELAPSED_TIME > 300 DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
AlreadyExistsException(message:Trigger already exists, use alter: ) -FAILED: ParseException line 4:58 mismatched input 'AND' expecting DO near '30sec' in create trigger statement -FAILED: ParseException line 2:61 mismatched input 'OR' expecting DO near '30second' in create trigger statement +FAILED: ParseException line 4:60 mismatched input 'AND' expecting DO near ''30sec'' in create trigger statement +FAILED: ParseException line 2:63 mismatched input 'OR' expecting DO near ''30second'' in create trigger statement FAILED: ParseException line 2:50 mismatched input '>=' expecting > near 'ELAPSED_TIME' in comparisionOperator FAILED: ParseException line 2:50 mismatched input '<' expecting > near 'ELAPSED_TIME' in comparisionOperator FAILED: ParseException line 2:50 mismatched input '<=' expecting > near 'ELAPSED_TIME' in comparisionOperator FAILED: ParseException line 2:50 mismatched input '=' expecting > near 'ELAPSED_TIME' in comparisionOperator -PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > 30hour DO MOVE TO slow_pool +PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30hour' DO MOVE TO slow_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > 30hour DO MOVE TO slow_pool +POSTHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30hour' DO MOVE TO slow_pool POSTHOOK: type: CREATE TRIGGER PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS PREHOOK: type: QUERY @@ -3666,10 +3698,10 @@ POSTHOOK: Input: sys@wm_triggers #### A masked pattern was here #### plan_1 trigger_1 BYTES_READ > 10kb KILL plan_1 trigger_2 ELAPSED_TIME > 30hour MOVE TO slow_pool -PREHOOK: query: ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > 1min DO KILL +PREHOOK: query: ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '1min' DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ 
> 1min DO KILL +POSTHOOK: query: ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '1min' DO KILL POSTHOOK: type: ALTER TRIGGER PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS PREHOOK: type: QUERY @@ -3695,34 +3727,34 @@ POSTHOOK: type: QUERY POSTHOOK: Input: sys@wm_triggers #### A masked pattern was here #### plan_1 trigger_2 ELAPSED_TIME > 30hour MOVE TO slow_pool -PREHOOK: query: CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > 100mb DO MOVE TO null_pool +PREHOOK: query: CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > '100mb' DO MOVE TO null_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) -PREHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100KB DO MOVE TO `table` +PREHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > '100KB' DO MOVE TO `table` PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100KB DO MOVE TO `table` +POSTHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > '100KB' DO MOVE TO `table` POSTHOOK: type: CREATE TRIGGER -PREHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100MB DO MOVE TO `default` +PREHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > '100MB' DO MOVE TO `default` PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100MB DO MOVE TO `default` +POSTHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > '100MB' DO MOVE TO `default` POSTHOOK: type: CREATE TRIGGER -PREHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1GB DO MOVE TO `default` +PREHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > "1GB" DO MOVE TO `default` PREHOOK: type: CREATE TRIGGER PREHOOK: 
Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1GB DO MOVE TO `default` +POSTHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > "1GB" DO MOVE TO `default` POSTHOOK: type: CREATE TRIGGER PREHOOK: query: CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest POSTHOOK: query: CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL POSTHOOK: type: CREATE TRIGGER -PREHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > 1hour DO KILL +PREHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > '1hour' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > 1hour DO KILL +POSTHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN ELAPSED_TIME > '1hour' DO KILL POSTHOOK: type: CREATE TRIGGER PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS PREHOOK: type: QUERY @@ -3776,7 +3808,7 @@ PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) -PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > 1000gb DO KILL +PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000gb" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) @@ -3800,7 +3832,7 @@ PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
InvalidOperationException(message:Resource plan must be disabled to edit it.) -PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > 1000KB DO KILL +PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000KB" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) @@ -4499,15 +4531,15 @@ PREHOOK: type: CREATE MAPPING PREHOOK: Output: dummyHostnameForTest POSTHOOK: query: CREATE USER MAPPING "user1" IN plan_4a TO pool1 POSTHOOK: type: CREATE MAPPING -PREHOOK: query: CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > 10GB DO KILL +PREHOOK: query: CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > '10GB' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > 10GB DO KILL +POSTHOOK: query: CREATE TRIGGER plan_4a.trigger_1 WHEN BYTES_READ > '10GB' DO KILL POSTHOOK: type: CREATE TRIGGER -PREHOOK: query: CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > 11GB DO KILL +PREHOOK: query: CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > '11GB' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -POSTHOOK: query: CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > 11GB DO KILL +POSTHOOK: query: CREATE TRIGGER plan_4a.trigger_2 WHEN BYTES_READ > '11GB' DO KILL POSTHOOK: type: CREATE TRIGGER PREHOOK: query: ALTER POOL plan_4a.pool1 ADD TRIGGER trigger_2 PREHOOK: type: ALTER POOL