diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 57e55ff..c1774a2 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -845,6 +845,7 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
         "In test mode, specifies comma separated table names which would not apply sampling"),
     HIVETESTMODEDUMMYSTATAGGR("hive.test.dummystats.aggregator", "", "internal variable for test"),
     HIVETESTMODEDUMMYSTATPUB("hive.test.dummystats.publisher", "", "internal variable for test"),
+    HIVETESTCURRENTTIMESTAMP("hive.test.currenttimestamp", null, "current timestamp for test"),

     HIVEMERGEMAPFILES("hive.merge.mapfiles", true,
         "Merge small files at the end of a map-only job"),
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 0226f28..3f8702e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -380,6 +380,8 @@ public int compile(String command, boolean resetTaskIds) {
     String queryId = QueryPlan.makeQueryId();
     conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);

+    SessionState.get().setupQueryCurrentTimestamp();
+
     try {
       command = new VariableSubstitution().substitute(conf,command);
       ctx = new Context(conf);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index d7c4ca7..0d95909 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -304,6 +304,8 @@
     registerUDF("~", UDFOPBitNot.class, true);

     registerGenericUDF("current_database", UDFCurrentDB.class);
+    registerGenericUDF("current_date", GenericUDFCurrentDate.class);
+    registerGenericUDF("current_timestamp", GenericUDFCurrentTimestamp.class);

     registerGenericUDF("isnull", GenericUDFOPNull.class);
     registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index f412010..20c73cd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -270,6 +270,8 @@ KW_UNBOUNDED: 'UNBOUNDED';
 KW_PRECEDING: 'PRECEDING';
 KW_FOLLOWING: 'FOLLOWING';
 KW_CURRENT: 'CURRENT';
+KW_CURRENT_DATE: 'CURRENT_DATE';
+KW_CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP';
 KW_LESS: 'LESS';
 KW_MORE: 'MORE';
 KW_OVER: 'OVER';
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index c960a6b..bbb8eb3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -173,11 +173,29 @@ function
     -> ^(TOK_FUNCTIONDI functionName (selectExpression+)?)
     ;

+nonParenthesizedFunction
+@init { gParent.pushMsg("non-parenthesized function name", state); }
+@after { gParent.popMsg(state); }
+    :
+    nonParenthesizedFunctionName
+    -> ^(TOK_FUNCTION nonParenthesizedFunctionName)
+    ;
+
+nonParenthesizedFunctionName
+@init { gParent.pushMsg("non-parenthesized function name", state); }
+@after { gParent.popMsg(state); }
+    :
+    KW_CURRENT_DATE | KW_CURRENT_TIMESTAMP
+    ;
+
 functionName
 @init { gParent.pushMsg("function name", state); }
 @after { gParent.popMsg(state); }
     : // Keyword IF is also a function name
     KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | functionIdentifier
+    |
+    // This allows current_timestamp() to work as well as current_timestamp
+    nonParenthesizedFunctionName
     ;

 castExpression
@@ -273,6 +291,7 @@ atomExpression
     | castExpression
     | caseExpression
     | whenExpression
+    | nonParenthesizedFunction
     | (functionName LPAREN) => function
     | tableOrColumn
     | LPAREN! expression RPAREN!
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index f45b20a..c7ea61c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -25,6 +25,7 @@
 import java.io.PrintStream;
 import java.net.URI;
 import java.net.URLClassLoader;
+import java.sql.Timestamp;
 import java.util.*;

 import org.apache.commons.io.FileUtils;
@@ -246,6 +247,11 @@
   private final Set<String> preReloadableAuxJars = new HashSet<String>();

   /**
+   * CURRENT_TIMESTAMP value for query
+   */
+  private Timestamp queryCurrentTimestamp;
+
+  /**
    * Get the lineage state stored in this session.
    *
    * @return LineageState
@@ -1388,4 +1394,27 @@
   public String getNextValuesTempTableSuffix() {
     return Integer.toString(nextValueTempTableSuffix++);
   }
+  /**
+   * Initialize current timestamp, other necessary query initialization.
+   */
+  public void setupQueryCurrentTimestamp() {
+    queryCurrentTimestamp = new Timestamp(System.currentTimeMillis());
+
+    // Provide a facility to set current timestamp during tests
+    if (conf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
+      String overrideTimestampString =
+          HiveConf.getVar(conf, HiveConf.ConfVars.HIVETESTCURRENTTIMESTAMP, null);
+      if (overrideTimestampString != null && overrideTimestampString.length() > 0) {
+        queryCurrentTimestamp = Timestamp.valueOf(overrideTimestampString);
+      }
+    }
+  }
+
+  /**
+   * Get query current timestamp
+   * @return the current timestamp to use for the running query
+   */
+  public Timestamp getQueryCurrentTimestamp() {
+    return queryCurrentTimestamp;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
new file mode 100644
index 0000000..5fa3ec4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
@@ -0,0 +1,50 @@
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+@Description(name = "current_date", value = "_FUNC_() - Returns the current date")
+@UDFType(deterministic = false)
+public class GenericUDFCurrentDate extends GenericUDF {
+
+  DateWritable currentDate = null;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length != 0) {
+      throw new UDFArgumentLengthException(
+          "The function CURRENT_DATE does not take any arguments, but found "
+          + arguments.length);
+    }
+
+    if (currentDate == null) {
+      long currentTimeMillis = SessionState.get().getQueryCurrentTimestamp().getTime();
+      Date dateVal = new Date(currentTimeMillis);
+      // Truncate to the local midnight boundary so the Date value has no time-of-day component.
+      dateVal = Date.valueOf(dateVal.toString());
+      currentDate = new DateWritable(dateVal);
+    }
+
+    return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return currentDate;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "CURRENT_DATE()";
+  }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
new file mode 100644
index 0000000..c99ac2b
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
@@ -0,0 +1,45 @@
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+@Description(name = "current_timestamp", value = "_FUNC_() - Returns the current timestamp")
+@UDFType(deterministic = false)
+public class GenericUDFCurrentTimestamp extends GenericUDF {
+
+  TimestampWritable currentTimestamp = null;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length != 0) {
+      throw new UDFArgumentLengthException(
+          "The function CURRENT_TIMESTAMP does not take any arguments, but found "
+          + arguments.length);
+    }
+
+    if (currentTimestamp == null) {
+      currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp());
+    }
+
+    return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return currentTimestamp;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "CURRENT_TIMESTAMP()";
+  }
+
+}
diff --git ql/src/test/queries/clientpositive/current_date_timestamp.q ql/src/test/queries/clientpositive/current_date_timestamp.q
new file mode 100644
index 0000000..9bed885
--- /dev/null
+++ ql/src/test/queries/clientpositive/current_date_timestamp.q
@@ -0,0 +1,4 @@
+select current_timestamp = current_timestamp(), current_date = current_date() from src limit 5;
+
+set hive.test.currenttimestamp =2012-01-01 01:02:03;
+select current_date, current_timestamp from src limit 5;
diff --git ql/src/test/results/clientpositive/current_date_timestamp.q.out ql/src/test/results/clientpositive/current_date_timestamp.q.out
new file mode 100644
index 0000000..9c26095
--- /dev/null
+++ ql/src/test/results/clientpositive/current_date_timestamp.q.out
@@ -0,0 +1,26 @@
+PREHOOK: query: select current_timestamp = current_timestamp(), current_date = current_date() from src limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select current_timestamp = current_timestamp(), current_date = current_date() from src limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+true	true
+true	true
+true	true
+true	true
+true	true
+PREHOOK: query: select current_date, current_timestamp from src limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select current_date, current_timestamp from src limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
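
A minimal usage sketch, illustrative only and not part of the diff. It assumes the patch is applied and that the standard src test table exists. Both functions are resolved once per query in SessionState (Driver.compile calls setupQueryCurrentTimestamp and the UDFs cache the value in initialize), so every row of a query sees the same value:

  -- with and without parentheses, same value for all rows of the query:
  select current_date, current_date(), current_timestamp, current_timestamp() from src limit 1;

  -- test-only override, honored only when hive.in.test is true:
  set hive.test.currenttimestamp=2012-01-01 01:02:03;
  select current_timestamp from src limit 1;   -- 2012-01-01 01:02:03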