Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1530480)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -774,7 +774,7 @@
HIVE_SERVER2_TABLE_TYPE_MAPPING("hive.server2.table.type.mapping", "CLASSIC"),
HIVE_SERVER2_SESSION_HOOK("hive.server2.session.hook", ""),
- HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,delete"),
+ HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,delete,compile"),
HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", ""),
Index: ivy/libraries.properties
===================================================================
--- ivy/libraries.properties (revision 1530480)
+++ ivy/libraries.properties (working copy)
@@ -17,7 +17,7 @@
# project configuration files.
# These are the versions of our dependencies (in alphabetical order)
-apacheant.version=1.7.1
+apacheant.version=1.8.4
ant-contrib.version=1.0b3
ant-task.version=2.0.10
antlr.version=3.4
Index: ql/ivy.xml
===================================================================
--- ql/ivy.xml (revision 1530480)
+++ ql/ivy.xml (working copy)
@@ -51,6 +51,7 @@
+
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (revision 1530480)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (working copy)
@@ -19,16 +19,17 @@
package org.apache.hadoop.hive.ql.processors;
import static org.apache.commons.lang.StringUtils.isBlank;
+
+import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import java.sql.SQLException;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
/**
* CommandProcessorFactory.
@@ -75,6 +76,8 @@
return new AddResourceProcessor();
case DELETE:
return new DeleteResourceProcessor();
+ case COMPILE:
+ return new CompileProcessor();
default:
throw new AssertionError("Unknown HiveCommand " + hiveCommand);
}
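
The new COMPILE case hands the command off to a CompileProcessor, whose source is not included in this section. A rough outline of what such a processor might do is sketched below; it uses simplified stand-in types rather than Hive's CommandProcessor and CommandProcessorResponse API, and the parsing shown is an assumption based on the compile syntax used in the test later in this patch:

/**
 * Hypothetical outline of a CompileProcessor. The real class is not shown in
 * this patch; the Response type below is a simplified stand-in, not the Hive API.
 */
public class CompileProcessorSketch {

  /** Simplified stand-in for Hive's command processor response. */
  static class Response {
    final int code;
    final String message;
    Response(int code, String message) { this.code = code; this.message = message; }
  }

  /**
   * Handle the text after the "compile" keyword, e.g.
   *   `...source...` AS GROOVY NAMED Pyth.groovy
   * Expected steps: extract the backtick-quoted source, compile it with the named
   * compiler (Groovy in the test), and register the result as a session resource
   * so CREATE TEMPORARY FUNCTION can load the class.
   */
  Response run(String command) {
    int open = command.indexOf('`');
    int close = command.lastIndexOf('`');
    if (open < 0 || close <= open) {
      return new Response(1, "compile command requires backtick-quoted source");
    }
    String source = command.substring(open + 1, close);
    // ... compile the source and add the output to the session's resources ...
    return new Response(0, "compiled " + source.length() + " characters");
  }

  public static void main(String[] args) {
    Response r = new CompileProcessorSketch().run(
        "compile `class X {}` AS GROOVY NAMED X.groovy");
    System.out.println(r.code + ": " + r.message);
  }
}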
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (revision 1530480)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (working copy)
@@ -30,7 +30,8 @@
RESET(),
DFS(),
ADD(),
- DELETE();
+ DELETE(),
+ COMPILE();
private static final Set<String> COMMANDS = new HashSet<String>();
static {
for (HiveCommand command : HiveCommand.values()) {
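
COMPILE becomes resolvable because the enum's static block registers every constant's name in the COMMANDS set, and the factory maps the first token of a command line against that set. A stand-alone sketch of that lookup pattern (the real HiveCommand.find signature may differ):

import java.util.HashSet;
import java.util.Set;

/** Illustrative stand-alone copy of the enum-lookup pattern used by HiveCommand. */
public class HiveCommandLookupSketch {

  enum Cmd { SET, RESET, DFS, ADD, DELETE, COMPILE }

  // Pre-compute the set of names once, as the real class does in its static block.
  private static final Set<String> COMMANDS = new HashSet<String>();
  static {
    for (Cmd c : Cmd.values()) {
      COMMANDS.add(c.name());
    }
  }

  /** Map the first token of a command line to an enum constant, or null if not a command. */
  static Cmd find(String[] tokens) {
    if (tokens == null || tokens.length == 0 || tokens[0] == null) {
      return null;
    }
    String first = tokens[0].trim().toUpperCase();
    return COMMANDS.contains(first) ? Cmd.valueOf(first) : null;
  }

  public static void main(String[] args) {
    System.out.println(find("compile `...` AS GROOVY NAMED Pyth.groovy".split("\\s+"))); // COMPILE
    System.out.println(find("select 1".split("\\s+")));                                  // null
  }
}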
Index: ql/src/test/queries/clientpositive/compile_processor.q
===================================================================
--- ql/src/test/queries/clientpositive/compile_processor.q (revision 0)
+++ ql/src/test/queries/clientpositive/compile_processor.q (working copy)
@@ -0,0 +1,11 @@
+
+compile `import org.apache.hadoop.hive.ql.exec.UDF \;
+public class Pyth extends UDF {
+ public double evaluate(double a, double b){
+ return Math.sqrt((a*a) + (b*b)) \;
+ }
+} ` AS GROOVY NAMED Pyth.groovy;
+CREATE TEMPORARY FUNCTION Pyth as 'Pyth';
+
+SELECT Pyth(3,4) FROM src limit 1;
+
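
For reference, the Groovy class compiled inline by this test is equivalent to an ordinary Hive UDF; a Java rendering of the same function, using the org.apache.hadoop.hive.ql.exec.UDF base class already imported in the test source:

import org.apache.hadoop.hive.ql.exec.UDF;

/** Java equivalent of the Groovy class compiled inline by the test above. */
public class Pyth extends UDF {
  // Hypotenuse of a right triangle with legs a and b, e.g. Pyth(3,4) = 5.0.
  public double evaluate(double a, double b) {
    return Math.sqrt((a * a) + (b * b));
  }
}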
Index: ql/src/test/results/clientpositive/compile_processor.q.out
===================================================================
--- ql/src/test/results/clientpositive/compile_processor.q.out (revision 0)
+++ ql/src/test/results/clientpositive/compile_processor.q.out (working copy)
@@ -0,0 +1,13 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION Pyth as 'Pyth'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: CREATE TEMPORARY FUNCTION Pyth as 'Pyth'
+POSTHOOK: type: CREATEFUNCTION
+PREHOOK: query: SELECT Pyth(3,4) FROM src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT Pyth(3,4) FROM src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5.0