diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index e7416b1c75..92919e9daf 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -178,7 +178,7 @@ public MiniDruidCliConfig() {
         excludeQuery("druid_timestamptz.q"); // Disabled in HIVE-20322
         excludeQuery("druidmini_joins.q"); // Disabled in HIVE-20322
         excludeQuery("druidmini_masking.q"); // Disabled in HIVE-20322
-        excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
+        //excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
 
         setResultsDir("ql/src/test/results/clientpositive/druid");
         setLogDir("itests/qtest/target/tmp/log");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 1a86294f2c..8d7b0b58fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -992,6 +992,19 @@ protected void validateUDF(ASTNode expr, boolean isFunction, TypeCheckCtx ctx, F
       }
     }
 
+    protected void insertCast(String funcText, ArrayList<ExprNodeDesc> children) throws SemanticException {
+      // The substring and concat UDFs expect their first argument to be a string, so insert an
+      // explicit cast of the first operand to string when it has some other type.
+      if (funcText.equals("substring") || funcText.equals("concat")) {
+        if (children.size() > 0 &&
+            !children.get(0).getTypeInfo().getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
+          // need to insert an explicit cast to string
+          ExprNodeDesc newColumn = ParseUtils.createConversionCast(children.get(0), TypeInfoFactory.stringTypeInfo);
+          children.set(0, newColumn);
+        }
+      }
+    }
+
     protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
         boolean isFunction, ArrayList<ExprNodeDesc> children, TypeCheckCtx ctx)
         throws SemanticException, UDFArgumentException {
@@ -1128,6 +1141,8 @@ protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
         }
       }
 
+      insertCast(funcText, children);
+
       validateUDF(expr, isFunction, ctx, fi, children, genericUDF);
 
       // Try to infer the type of the constant only if there are two
diff --git a/ql/src/test/queries/clientpositive/druidmini_test1.q b/ql/src/test/queries/clientpositive/druidmini_test1.q
index 30abf3cea0..f53cc05389 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test1.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test1.q
@@ -128,3 +128,10 @@ WHERE (`__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00')
 -- this patch https://github.com/druid-io/druid/commit/219e77aeac9b07dc20dd9ab2dd537f3f17498346
 
 explain select (cstring1 is null ) AS is_null, (cint is not null ) as isnotnull FROM druid_table_n3;
+
+explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+
+explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 45b9d78246..6f8551525f 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -839,3 +839,77 @@ STAGE PLANS:
             outputColumnNames: _col0, _col1
             ListSink
 
+PREHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n3
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+PREHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n3
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31
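
Reviewer note (not part of the patch): the sketch below is a minimal, self-contained illustration of the rule that insertCast adds to TypeCheckProcFactory -- for substring and concat, a non-string first operand gets wrapped in an explicit cast to string before UDF resolution. The Arg record and asStringCast helper are hypothetical stand-ins for Hive's ExprNodeDesc and ParseUtils.createConversionCast(child, TypeInfoFactory.stringTypeInfo); only the control flow mirrors the patch.

    // CastSketch.java -- illustrative only; Arg and asStringCast are hypothetical stand-ins,
    // not Hive APIs. Only the control flow mirrors insertCast in the patch above.
    import java.util.ArrayList;
    import java.util.List;

    public class CastSketch {

      // Hypothetical stand-in for Hive's ExprNodeDesc: just a type name and an expression string.
      record Arg(String typeName, String expr) {}

      // Hypothetical stand-in for ParseUtils.createConversionCast(child, stringTypeInfo).
      static Arg asStringCast(Arg a) {
        return new Arg("string", "CAST(" + a.expr() + " AS STRING)");
      }

      // Mirrors the patch: only substring/concat are affected, and only when a first
      // argument is present and is not already a string.
      static void insertCast(String funcText, List<Arg> children) {
        if (!funcText.equals("substring") && !funcText.equals("concat")) {
          return;
        }
        if (!children.isEmpty() && !children.get(0).typeName().equals("string")) {
          children.set(0, asStringCast(children.get(0)));
        }
      }

      public static void main(String[] args) {
        // substring(to_date(`__time`), 4): the first operand is a date, so it gets wrapped.
        List<Arg> children = new ArrayList<>(List.of(
            new Arg("date", "to_date(`__time`)"),
            new Arg("int", "4")));
        insertCast("substring", children);
        System.out.println(children.get(0).expr()); // prints: CAST(to_date(`__time`) AS STRING)
      }
    }

With the cast inserted during type checking, the implicit form substring(to_date(`__time`), 4) and the explicit form substring(cast(to_date(`__time`) as string), 4) added to druidmini_test1.q compile to the same Druid scan (identical druid.query.json) and return the same rows, which is what the updated q.out records.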