diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 88a7cfc70e0471a8d36ad865e8febd5eb281bcb5..a64208a1dce1e73f315dfefcc7eeb2029a1bd1ec 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1843,6 +1843,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal TESTMODE_BUCKET_CODEC_VERSION("hive.test.bucketcodec.version", 1, "For testing only. Will make ACID subsystem write RecordIdentifier.bucketId in specified\n" + "format", false), + HIVE_QUERY_TIMESTAMP("hive.query.timestamp", System.currentTimeMillis(), "Timestamp (milliseconds since epoch) at which the current query started executing; set internally at query compile time so that UDFs such as current_timestamp evaluate consistently across tasks."), HIVEMERGEMAPFILES("hive.merge.mapfiles", true, "Merge small files at the end of a map-only job"), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 6bb756cc08148ae4bb9c935f270579e8abeb717a..9f65a771f95a7c0bd3fdb4e56e47c0fc70235850 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -1924,6 +1924,7 @@ public String getNextValuesTempTableSuffix() { */ public void setupQueryCurrentTimestamp() { queryCurrentTimestamp = new Timestamp(System.currentTimeMillis()); + sessionConf.setLongVar(ConfVars.HIVE_QUERY_TIMESTAMP, queryCurrentTimestamp.getTime()); // Provide a facility to set current timestamp during tests if (sessionConf.getBoolVar(ConfVars.HIVE_IN_TEST)) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java index 7d3c3f46aa1318cb1eaf152d3b9f0ab36ef00ff7..91fd08f13e5cdc28cc80acffea0599e14a45a96e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java @@ -18,8 +18,12 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Date; +import java.sql.Timestamp; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -39,6 +43,13 @@ public class GenericUDFCurrentDate extends GenericUDF { protected DateWritable currentDate; + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } @Override public ObjectInspector initialize(ObjectInspector[] arguments) @@ -50,8 +61,21 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentDate == null) { + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + Date dateVal = - Date.valueOf(SessionState.get().getQueryCurrentTimestamp().toString().substring(0, 10)); + Date.valueOf(queryTimestamp.toString().substring(0, 10)); currentDate = new DateWritable(dateVal); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java index 9da51c84f51d3dae1eac46f8b1e7eef2e482e6c4..ca43840e372a26accda20386ef4c8679310783fe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java @@ -17,7 +17,12 @@ */ package org.apache.hadoop.hive.ql.udf.generic; +import java.sql.Timestamp; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -37,6 +42,13 @@ public class GenericUDFCurrentTimestamp extends GenericUDF { protected TimestampWritable currentTimestamp; + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } @Override public ObjectInspector initialize(ObjectInspector[] arguments) @@ -48,7 +60,19 @@ public ObjectInspector initialize(ObjectInspector[] arguments) } if (currentTimestamp == null) { - currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp()); + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + currentTimestamp = new TimestampWritable(queryTimestamp); } return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index 832983105f1f453a756a532cf21bcba8b3ae9fd0..6ce72f77037d49571eb1bc5fb647bed0559119cf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -18,6 +18,11 @@ package org.apache.hadoop.hive.ql.udf.generic; +import java.sql.Timestamp; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.MapredContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; @@ -37,6 +42,14 @@ public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp { private static final Logger LOG = LoggerFactory.getLogger(GenericUDFUnixTimeStamp.class); private LongWritable currentTimestamp; // retValue is transient so store this separately. + private Configuration conf; + + @Override + public void configure(MapredContext context) { + super.configure(context); + conf = context.getJobConf(); + } + @Override protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException { if (arguments.length > 0) { @@ -44,7 +57,19 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx } else { if (currentTimestamp == null) { currentTimestamp = new LongWritable(0); - setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp()); + SessionState ss = SessionState.get(); + Timestamp queryTimestamp; + if (ss == null) { + if (conf == null) { + queryTimestamp = new Timestamp(System.currentTimeMillis()); + } else { + queryTimestamp = new Timestamp( + HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_QUERY_TIMESTAMP)); + } + } else { + queryTimestamp = ss.getQueryCurrentTimestamp(); + } + setValueFromTs(currentTimestamp, queryTimestamp); String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead."; SessionState.getConsole().printInfo(msg, false); }