diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index e7416b1c75..92919e9daf 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -178,7 +178,7 @@ public MiniDruidCliConfig() {
         excludeQuery("druid_timestamptz.q"); // Disabled in HIVE-20322
         excludeQuery("druidmini_joins.q"); // Disabled in HIVE-20322
         excludeQuery("druidmini_masking.q"); // Disabled in HIVE-20322
-        excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
+        //excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
 
         setResultsDir("ql/src/test/results/clientpositive/druid");
         setLogDir("itests/qtest/target/tmp/log");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
index ece6e774ad..dc4271e736 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
@@ -29,8 +29,10 @@
 import org.apache.calcite.adapter.druid.ExtractOperatorConversion;
 import org.apache.calcite.adapter.druid.FloorOperatorConversion;
 import org.apache.calcite.adapter.druid.UnarySuffixOperatorConversion;
+import org.apache.calcite.avatica.SqlType;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexLiteral;
 import org.apache.calcite.rex.RexNode;
@@ -38,6 +40,7 @@
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.calcite.sql.type.BasicSqlType;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveConcat;
@@ -118,7 +121,14 @@ private DruidSqlOperatorConverter() {
     @Nullable @Override public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query
     ) {
       final RexCall call = (RexCall) rexNode;
-      final String arg = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
+      RexNode subStringArg = call.getOperands().get(0);
+      if (subStringArg.getType().getSqlTypeName() != SqlTypeName.VARCHAR) {
+        // need to insert explicit cast to string
+        RexBuilder rexBuilder = query.getCluster().getRexBuilder();
+        subStringArg = rexBuilder.makeCast(query.getCluster().getTypeFactory().createSqlType(SqlTypeName.VARCHAR),
+            subStringArg);
+      }
+      final String arg = DruidExpressions.toDruidExpression(subStringArg, rowType, query);
       if (arg == null) {
         return null;
       }
diff --git a/ql/src/test/queries/clientpositive/druidmini_test1.q b/ql/src/test/queries/clientpositive/druidmini_test1.q
index 30abf3cea0..f53cc05389 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test1.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test1.q
@@ -128,3 +128,10 @@ WHERE (`__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00')
 
 -- this patch https://github.com/druid-io/druid/commit/219e77aeac9b07dc20dd9ab2dd537f3f17498346
 explain select (cstring1 is null ) AS is_null, (cint is not null ) as isnotnull FROM druid_table_n3;
+
+explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+
+explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 45b9d78246..6f8551525f 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -839,3 +839,77 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             ListSink
 
+PREHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n3
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+PREHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n3
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31