diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
index 4db0714619..6aa98c08c4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
@@ -21,6 +21,7 @@
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import org.apache.calcite.adapter.druid.DirectOperatorConversion;
 import org.apache.calcite.adapter.druid.DruidExpressions;
@@ -51,6 +52,7 @@
 import javax.annotation.Nullable;
 
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
@@ -87,9 +89,9 @@ private DruidSqlOperatorConverter() {
     druidOperatorMap
         .put(SqlStdOperatorTable.SUBSTRING, new DruidSqlOperatorConverter.DruidSubstringOperatorConversion());
     druidOperatorMap
-        .put(SqlStdOperatorTable.IS_NULL, new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"));
+        .put(SqlStdOperatorTable.IS_NULL, new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"));
     druidOperatorMap.put(SqlStdOperatorTable.IS_NOT_NULL,
-        new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull")
+        new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull")
     );
     druidOperatorMap.put(HiveTruncSqlOperator.INSTANCE, new DruidDateTruncOperatorConversion());
     druidOperatorMap.put(HiveToDateSqlOperator.INSTANCE, new DruidToDateOperatorConversion());
@@ -346,4 +348,33 @@ private static String applyTimestampFormat(String arg, String format, TimeZone t
     );
   }
 
+  public static class UnaryFunctionOperatorConversion implements org.apache.calcite.adapter.druid.DruidSqlOperatorConverter {
+
+    private final SqlOperator operator;
+    private final String druidOperator;
+
+    public UnaryFunctionOperatorConversion(SqlOperator operator, String druidOperator) {
+      this.operator = operator;
+      this.druidOperator = druidOperator;
+    }
+
+    @Override public SqlOperator calciteOperator() {
+      return operator;
+    }
+
+    @Override public String toDruidExpression(RexNode rexNode, RelDataType rowType,
+        DruidQuery druidQuery) {
+      final RexCall call = (RexCall) rexNode;
+
+      final List druidExpressions = DruidExpressions.toDruidExpressions(
+          druidQuery, rowType,
+          call.getOperands());
+
+      if (druidExpressions == null) {
+        return null;
+      }
+
+      return DruidQuery.format("%s(%s)", druidOperator, Iterables.getOnlyElement(druidExpressions));
+    }
+  }
 }
diff --git ql/src/test/results/clientpositive/druid/druidmini_test1.q.out ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 89da36a84a..4e078aa41c 100644
--- ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -814,7 +814,7 @@ STAGE PLANS:
           properties:
             druid.fieldNames vc,vc0
             druid.fieldTypes boolean,boolean
-            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"cstring1\" isnull)","outputType":"FLOAT"},{"type":"expression","name":"vc0","expression":"(\"cint\" notnull)","outputType":"FLOAT"}],"columns":["vc","vc0"],"resultFormat":"compactedList"}
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"isnull(\"cstring1\")","outputType":"FLOAT"},{"type":"expression","name":"vc0","expression":"notnull(\"cint\")","outputType":"FLOAT"}],"columns":["vc","vc0"],"resultFormat":"compactedList"}
             druid.query.type scan
           Select Operator
             expressions: vc (type: boolean), vc0 (type: boolean)