diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
index 2b1d0d351f..4db0714619 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
@@ -95,8 +95,12 @@ private DruidSqlOperatorConverter() {
       druidOperatorMap.put(HiveToDateSqlOperator.INSTANCE, new DruidToDateOperatorConversion());
       druidOperatorMap.put(HiveFromUnixTimeSqlOperator.INSTANCE, new DruidFormUnixTimeOperatorConversion());
       druidOperatorMap.put(HiveUnixTimestampSqlOperator.INSTANCE, new DruidUnixTimestampOperatorConversion());
-      druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE));
-      druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE));
+      druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE,
+          new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE)
+      );
+      druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE,
+          new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE)
+      );
     }
     return druidOperatorMap;
   }
@@ -254,7 +258,8 @@ private DruidSqlOperatorConverter() {
       // dealing with String type
       final String format = call.getOperands().size() == 2 ? DruidExpressions
           .toDruidExpression(call.getOperands().get(1), rowType, query) : DEFAULT_TS_FORMAT;
-      return DruidExpressions.functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format)));
+      return DruidExpressions
+          .functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format)));
     }
   }
 
@@ -277,9 +282,12 @@ private DruidSqlOperatorConverter() {
 
       }
      final String numMillis = DruidQuery.format("(%s * '1000')", arg);
-      final String format = call.getOperands().size() == 1 ? DEFAULT_TS_FORMAT : DruidExpressions
-          .toDruidExpression(call.getOperands().get(1), rowType, query);
-      return applyTimestampFormat(numMillis, format, timezoneId(query));
+      final String format =
+          call.getOperands().size() == 1 ? DruidExpressions.stringLiteral(DEFAULT_TS_FORMAT) : DruidExpressions
+          .toDruidExpression(call.getOperands().get(1), rowType, query);
+      return DruidExpressions.functionCall("timestamp_format",
+          ImmutableList.of(numMillis, format, DruidExpressions.stringLiteral(timezoneId(query).getID()))
+      );
     }
   }
 
@@ -322,7 +330,6 @@ public DruidDateArithmeticOperatorConversion(int direction, SqlOperator operator
     }
   }
 
-
   /**
    * utility function to extract timezone id from Druid query
    * @param query Druid Rel
diff --git ql/src/test/queries/clientpositive/druidmini_expressions.q ql/src/test/queries/clientpositive/druidmini_expressions.q
index d88b281d99..ffc722823b 100644
--- ql/src/test/queries/clientpositive/druidmini_expressions.q
+++ ql/src/test/queries/clientpositive/druidmini_expressions.q
@@ -87,6 +87,14 @@ select unix_timestamp(from_unixtime(1396681200)) from druid_table_n0 limit 1;
 explain select unix_timestamp(`__time`) from druid_table_n0 limit 1;
 select unix_timestamp(`__time`) from druid_table_n0 limit 1;
 
+explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss');
+
+select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss');
+
 explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY');
 select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY');
 select TRUNC(cast(`__time` as timestamp), 'YEAR') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YEAR');
diff --git ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
index c1e54ea351..e48a96ef6a 100644
--- ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
@@ -955,6 +955,48 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@druid_table_n0
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 -60
+PREHOOK: query: explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n0
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"groupBy","dataSource":"default.druid_table_n0","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_format((div(\"__time\",1000) * '1000'),'yyyy-MM-dd HH:mm:ss','US/Pacific')","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+            druid.query.type groupBy
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n0
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n0
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-12-31 15:59:00
+1969-12-31 16:00:00
 PREHOOK: query: explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY')
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY')
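
For reference, below is a minimal, self-contained sketch of the expression rewrite this patch makes for FROM_UNIXTIME: the default timestamp format is now wrapped as a Druid string literal instead of being spliced in as a bare token, and the conversion emits a direct timestamp_format() call that carries the query time zone. The stringLiteral() and functionCall() helpers are simplified stand-ins for the DruidExpressions methods used in the patch; the class name and harness around them are hypothetical.

import java.util.List;

// Minimal sketch of the conversion the patched
// DruidFormUnixTimeOperatorConversion performs. Not the real Hive code;
// the helpers below only approximate DruidExpressions.stringLiteral and
// DruidExpressions.functionCall.
public class FromUnixtimeSketch {

  static final String DEFAULT_TS_FORMAT = "yyyy-MM-dd HH:mm:ss";

  // Quote a value as a Druid expression string literal
  // (quote escaping elided for brevity).
  static String stringLiteral(String s) {
    return "'" + s + "'";
  }

  // Render a Druid expression function call: name(arg1,arg2,...).
  static String functionCall(String name, List<String> args) {
    return name + "(" + String.join(",", args) + ")";
  }

  // Mirrors the patched code path: when no format operand is given, the
  // default format goes through stringLiteral(...) instead of being
  // inserted as a bare token (the bug), and the result is a direct
  // timestamp_format(...) call with an explicit time zone argument.
  static String fromUnixtimeExpression(String numMillis, String formatExpr, String timezoneId) {
    String format = formatExpr == null ? stringLiteral(DEFAULT_TS_FORMAT) : formatExpr;
    return functionCall("timestamp_format",
        List.of(numMillis, format, stringLiteral(timezoneId)));
  }

  public static void main(String[] args) {
    // Reproduces the virtual column expression in the q.out above:
    // timestamp_format((div("__time",1000) * '1000'),'yyyy-MM-dd HH:mm:ss','US/Pacific')
    System.out.println(fromUnixtimeExpression(
        "(div(\"__time\",1000) * '1000')", null, "US/Pacific"));
  }
}

Running main() prints the same virtual column expression that appears in the druid.query.json of the q.out hunk above, which is how the new test queries exercise the fix.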