diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 53dfdd92dedcb054ba944a32615efe94a9bd181f..8a3da464d804853a0ec6aefba9e863e9935d4ab4 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1313,6 +1313,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal HIVETESTMODEROLLBACKTXN("hive.test.rollbacktxn", false, "For testing only. Will mark every ACID transaction aborted", false), HIVETESTMODEFAILCOMPACTION("hive.test.fail.compaction", false, "For testing only. Will cause CompactorMR to fail.", false), HIVETESTMODEFAILHEARTBEATER("hive.test.fail.heartbeater", false, "For testing only. Will cause Heartbeater to fail.", false), + HIVE_QUERY_TIMESTAMP("hive.query.timestamp", System.currentTimeMillis(), "Query execution start timestamp in milliseconds since epoch. Set internally at the start of each query; not intended to be set by users."), HIVEMERGEMAPFILES("hive.merge.mapfiles", true, "Merge small files at the end of a map-only job"), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 9768793edb4578c2a267dde46a25b72caa87a4b3..1db497f70d4fa2102f288da50e977fcf32b45caf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -1781,6 +1781,7 @@ public String getNextValuesTempTableSuffix() { */ public void setupQueryCurrentTimestamp() { queryCurrentTimestamp = new Timestamp(System.currentTimeMillis()); + sessionConf.setLongVar(ConfVars.HIVE_QUERY_TIMESTAMP, queryCurrentTimestamp.getTime()); // Provide a facility to set current timestamp during tests if (sessionConf.getBoolVar(ConfVars.HIVE_IN_TEST)) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java index 
1f027a2cefbe9c0eb1ec5a5e5cb63980a9a16a6c..1a985c878acfea1b2e3fc8ba5123f00fa3a44ab5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java @@ -18,8 +18,12 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; +import java.sql.Timestamp; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -39,6 +43,13 @@ public class GenericUDFCurrentDate extends GenericUDF { protected DateWritable currentDate; + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } @Override public ObjectInspector initialize(ObjectInspector[] arguments) @@ -50,8 +61,21 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentDate == null) { + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + Date dateVal = - Date.valueOf(SessionState.get().getQueryCurrentTimestamp().toString().substring(0, 10)); + Date.valueOf(queryTimestamp.toString().substring(0, 10)); currentDate = new DateWritable(dateVal); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java index 
2f13a22558a5586f133f8d3de70fc66ce758e5cb..e474c688490ecbe608afe246ced84933d0282653 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java @@ -17,7 +17,10 @@ */ package org.apache.hadoop.hive.ql.udf.generic; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -27,6 +30,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import java.sql.Timestamp; + // If there is a new UDFType to describe a function that is deterministic within a query // but changes value between queries, this function would fall into that category. 
@UDFType(deterministic = true) @@ -37,6 +42,13 @@ public class GenericUDFCurrentTimestamp extends GenericUDF { protected TimestampWritable currentTimestamp; + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } @Override public ObjectInspector initialize(ObjectInspector[] arguments) @@ -48,7 +60,19 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentTimestamp == null) { - currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp()); + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + currentTimestamp = new TimestampWritable(queryTimestamp); } return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index 118acdc22a09b9bcf37d3e7c8ed5ebdd1562a60b..9866d2d0b7227b5957e703f627c4c10564c75717 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -19,7 +19,11 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.io.PrintStream; +import java.sql.Timestamp; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; @@ -40,6 +44,14 @@ public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp { private 
static final Logger LOG = LoggerFactory.getLogger(GenericUDFUnixTimeStamp.class); private LongWritable currentTimestamp; // retValue is transient so store this separately. + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } + @Override protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException { if (arguments.length > 0) { @@ -47,7 +59,19 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx } else { if (currentTimestamp == null) { currentTimestamp = new LongWritable(0); - setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp()); + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + setValueFromTs(currentTimestamp, queryTimestamp); String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead."; LOG.warn(msg); PrintStream stream = LogHelper.getInfoStream();