diff --git common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
index 29dc06dca1a..f0b28c720df 100644
--- common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
+++ common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
@@ -43,6 +43,7 @@ public HiveChar(HiveChar hc, int len) {
/**
* Set char value, padding or truncating the value to the size of len parameter.
*/
+ @Override
public void setValue(String val, int len) {
super.setValue(HiveBaseChar.getPaddedValue(val, len), -1);
}
@@ -59,15 +60,18 @@ public String getPaddedValue() {
return value;
}
+ @Override
public int getCharacterLength() {
String strippedValue = getStrippedValue();
return strippedValue.codePointCount(0, strippedValue.length());
}
+ @Override
public String toString() {
return getPaddedValue();
}
+ @Override
public int compareTo(HiveChar rhs) {
if (rhs == this) {
return 0;
@@ -75,6 +79,7 @@ public int compareTo(HiveChar rhs) {
return this.getStrippedValue().compareTo(rhs.getStrippedValue());
}
+ @Override
public boolean equals(Object rhs) {
if (rhs == this) {
return true;
@@ -85,7 +90,9 @@ public boolean equals(Object rhs) {
return this.getStrippedValue().equals(((HiveChar) rhs).getStrippedValue());
}
+ @Override
public int hashCode() {
return getStrippedValue().hashCode();
}
+
}
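Note: the newly annotated overrides above all operate on the stripped value. A minimal sketch (not part of the patch) of the resulting semantics:

    // HiveChar comparisons ignore trailing pad spaces; only toString()
    // exposes the padded form.
    HiveChar a = new HiveChar("ab", 5);   // stored padded as "ab   "
    HiveChar b = new HiveChar("ab", 3);   // stored padded as "ab "
    assert a.equals(b) && a.compareTo(b) == 0;
    assert a.hashCode() == b.hashCode();  // consistent with equals
    assert a.toString().equals("ab   ");  // padded value, length 5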
diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index e7d71595c7a..1a583115308 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -18,13 +18,6 @@
package org.apache.hadoop.hive.ql;
-import java.io.FileNotFoundException;
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
@@ -35,6 +28,13 @@
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.security.AccessControlException;
+import java.io.FileNotFoundException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
* List of all error messages.
* This list contains both compile time and run-time errors.
@@ -469,6 +469,7 @@
LOAD_DATA_LAUNCH_JOB_PARSE_ERROR(10416, "Encountered parse error while parsing rewritten load data into insert query"),
RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
+ INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
//========================== 20000 range starts here ========================//
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePointLookupOptimizerRule.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePointLookupOptimizerRule.java
index 04800cca91b..9a525adeb90 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePointLookupOptimizerRule.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePointLookupOptimizerRule.java
@@ -20,25 +20,24 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import java.util.stream.Collectors;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.plan.RelOptUtil;
-import org.apache.calcite.rel.AbstractRelNode;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Join;
-import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
-import org.apache.calcite.rex.RexInputRef;
-import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexShuttle;
import org.apache.calcite.rex.RexUtil;
@@ -59,15 +58,18 @@
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
-public abstract class HivePointLookupOptimizerRule extends RelOptRule {
-
/**
- * This optimization will take a Filter or expression, and if its predicate contains
- * an OR operator whose children are constant equality expressions, it will try
- * to generate an IN clause (which is more efficient). If the OR operator contains
- * AND operator children, the optimization might generate an IN clause that uses
- * structs.
+ * This optimization attempts to identify and close expanded INs.
+ *
+ * Basically:
+ *
+ * c IN (v1, v2, ...) <=> c=v1 OR c=v2 OR ...
+ *
+ * If c is a struct, then c=v1 stands for a group of ANDed field equalities.
*/
+public abstract class HivePointLookupOptimizerRule extends RelOptRule {
+
+ /** Rule adapter to apply the transformation to Filter conditions. */
public static class FilterCondition extends HivePointLookupOptimizerRule {
public FilterCondition (int minNumORClauses) {
super(operand(Filter.class, any()), minNumORClauses);
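Note: the semantics of the rewrite, expressed in plain Java (an illustration only; the rule itself operates on Calcite RexNode trees):

    import java.util.Arrays;
    import java.util.List;

    class InRewriteSketch {
      // Expanded form the rule matches:
      static boolean expanded(int a, int b) {
        return (a == 1 && b == 2) || (a == 3 && b == 4);
      }

      // Closed form it produces, i.e. (a, b) IN ((1, 2), (3, 4)):
      static boolean closed(int a, int b) {
        List<List<Integer>> values =
            Arrays.asList(Arrays.asList(1, 2), Arrays.asList(3, 4));
        return values.contains(Arrays.asList(a, b));
      }
    }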
@@ -78,22 +80,20 @@ public void onMatch(RelOptRuleCall call) {
final Filter filter = call.rel(0);
final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
final RexNode condition = RexUtil.pullFactors(rexBuilder, filter.getCondition());
- analyzeCondition(call , rexBuilder, filter, condition);
- }
- @Override protected RelNode copyNode(AbstractRelNode node, RexNode newCondition) {
- final Filter filter = (Filter) node;
- return filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
+ RexNode newCondition = analyzeRexNode(rexBuilder, condition);
+
+ // If we could not transform anything, we bail out
+ if (newCondition.toString().equals(condition.toString())) {
+ return;
+ }
+ RelNode newNode = filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
+
+ call.transformTo(newNode);
}
}
-/**
- * This optimization will take a Join or expression, and if its join condition contains
- * an OR operator whose children are constant equality expressions, it will try
- * to generate an IN clause (which is more efficient). If the OR operator contains
- * AND operator children, the optimization might generate an IN clause that uses
- * structs.
- */
+ /** Rule adapter to apply the transformation to Join conditions. */
public static class JoinCondition extends HivePointLookupOptimizerRule {
public JoinCondition (int minNumORClauses) {
super(operand(Join.class, any()), minNumORClauses);
@@ -104,18 +104,55 @@ public void onMatch(RelOptRuleCall call) {
final Join join = call.rel(0);
final RexBuilder rexBuilder = join.getCluster().getRexBuilder();
final RexNode condition = RexUtil.pullFactors(rexBuilder, join.getCondition());
- analyzeCondition(call , rexBuilder, join, condition);
+
+ RexNode newCondition = analyzeRexNode(rexBuilder, condition);
+
+ // If we could not transform anything, we bail out
+ if (newCondition.toString().equals(condition.toString())) {
+ return;
+ }
+
+ RelNode newNode = join.copy(join.getTraitSet(),
+ newCondition,
+ join.getLeft(),
+ join.getRight(),
+ join.getJoinType(),
+ join.isSemiJoinDone());
+
+ call.transformTo(newNode);
+ }
+ }
+
+ /** Rule adapter to apply the transformation to Projections. */
+ public static class ProjectionExpressions extends HivePointLookupOptimizerRule {
+ public ProjectionExpressions(int minNumORClauses) {
+ super(operand(Project.class, any()), minNumORClauses);
}
- @Override protected RelNode copyNode(AbstractRelNode node, RexNode newCondition) {
- final Join join = (Join) node;
- return join.copy(join.getTraitSet(),
- newCondition,
- join.getLeft(),
- join.getRight(),
- join.getJoinType(),
- join.isSemiJoinDone());
+ @Override
+ public void onMatch(RelOptRuleCall call) {
+ final Project project = call.rel(0);
+ boolean changed = false;
+ final RexBuilder rexBuilder = project.getCluster().getRexBuilder();
+ List<RexNode> newProjects = new ArrayList<>();
+ for (RexNode oldNode : project.getProjects()) {
+ RexNode newNode = analyzeRexNode(rexBuilder, oldNode);
+ if (!newNode.toString().equals(oldNode.toString())) {
+ changed = true;
+ newProjects.add(newNode);
+ } else {
+ newProjects.add(oldNode);
+ }
+ }
+ if (!changed) {
+ return;
+ }
+ Project newProject = project.copy(project.getTraitSet(), project.getInput(), newProjects,
+ project.getRowType(), project.getFlags());
+ call.transformTo(newProject);
+
}
+
}
protected static final Logger LOG = LoggerFactory.getLogger(HivePointLookupOptimizerRule.class);
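Note: a hedged wiring sketch — the planner handle and threshold below are assumptions for illustration, not part of this patch. The three adapters would typically be registered together so Filter, Join, and Project expressions all receive the same rewrite:

    import org.apache.calcite.plan.RelOptPlanner;

    static void registerPointLookupRules(RelOptPlanner planner, int minNumORClauses) {
      planner.addRule(new HivePointLookupOptimizerRule.FilterCondition(minNumORClauses));
      planner.addRule(new HivePointLookupOptimizerRule.JoinCondition(minNumORClauses));
      planner.addRule(new HivePointLookupOptimizerRule.ProjectionExpressions(minNumORClauses));
    }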
@@ -123,37 +160,21 @@ public void onMatch(RelOptRuleCall call) {
// Minimum number of OR clauses needed to transform into IN clauses
protected final int minNumORClauses;
- protected abstract RelNode copyNode(AbstractRelNode node, RexNode newCondition);
-
protected HivePointLookupOptimizerRule(
RelOptRuleOperand operand, int minNumORClauses) {
super(operand);
this.minNumORClauses = minNumORClauses;
}
- public void analyzeCondition(RelOptRuleCall call,
- RexBuilder rexBuilder,
- AbstractRelNode node,
- RexNode condition) {
-
+ public RexNode analyzeRexNode(RexBuilder rexBuilder, RexNode condition) {
// 1. We try to transform possible candidates
- RexTransformIntoInClause transformIntoInClause = new RexTransformIntoInClause(rexBuilder, node,
- minNumORClauses);
+ RexTransformIntoInClause transformIntoInClause = new RexTransformIntoInClause(rexBuilder, minNumORClauses);
RexNode newCondition = transformIntoInClause.apply(condition);
// 2. We merge IN expressions
RexMergeInClause mergeInClause = new RexMergeInClause(rexBuilder);
newCondition = mergeInClause.apply(newCondition);
-
- // 3. If we could not transform anything, we bail out
- if (newCondition.toString().equals(condition.toString())) {
- return;
- }
-
- // 4. We create the Filter/Join with the new condition
- RelNode newNode = copyNode(node, newCondition);
-
- call.transformTo(newNode);
+ return newCondition;
}
@@ -162,11 +183,9 @@ public void analyzeCondition(RelOptRuleCall call,
*/
protected static class RexTransformIntoInClause extends RexShuttle {
private final RexBuilder rexBuilder;
- private final AbstractRelNode nodeOp;
private final int minNumORClauses;
- RexTransformIntoInClause(RexBuilder rexBuilder, AbstractRelNode nodeOp, int minNumORClauses) {
- this.nodeOp = nodeOp;
+ RexTransformIntoInClause(RexBuilder rexBuilder, int minNumORClauses) {
this.rexBuilder = rexBuilder;
this.minNumORClauses = minNumORClauses;
}
@@ -180,7 +199,7 @@ public RexNode visitCall(RexCall inputCall) {
case OR:
try {
RexNode newNode = transformIntoInClauseCondition(rexBuilder,
- nodeOp.getRowType(), call, minNumORClauses);
+ call, minNumORClauses);
if (newNode != null) {
return newNode;
}
@@ -196,18 +215,56 @@ public RexNode visitCall(RexCall inputCall) {
}
/**
- * Represents a simple contraint.
+ * This class wraps a RexNode and enables equals/hashCode based on toString.
+ *
+ * After CALCITE-2632 this might not be needed anymore.
+ */
+ static class RexNodeRef {
+
+ public static Comparator<RexNodeRef> COMPARATOR =
+ (RexNodeRef o1, RexNodeRef o2) -> o1.node.toString().compareTo(o2.node.toString());
+ private RexNode node;
+
+ public RexNodeRef(RexNode node) {
+ this.node = node;
+ }
+
+ public RexNode getRexNode() {
+ return node;
+ }
+
+ @Override
+ public int hashCode() {
+ return node.toString().hashCode();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof RexNodeRef) {
+ RexNodeRef otherRef = (RexNodeRef) o;
+ return node.toString().equals(otherRef.node.toString());
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "ref for:" + node.toString();
+ }
+ }
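Note: a small sketch of what the wrapper buys. Here exprA, exprB, and constraintA are assumed fixtures: two structurally identical RexNode instances and an arbitrary map value. Before CALCITE-2632, RexNode equality is reference-based, so only the wrappers compare equal:

    import java.util.HashMap;
    import java.util.Map;

    Map<RexNodeRef, Constraint> byColumn = new HashMap<>();
    byColumn.put(new RexNodeRef(exprA), constraintA);
    // exprB renders to the same string as exprA, e.g. "substr($0, 1, 2)",
    // so it hits the same entry even though exprA != exprB.
    assert byColumn.containsKey(new RexNodeRef(exprB));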
+ /**
+ * Represents a constraint.
*
* Example: a=1
+ * substr(a,1,2) = concat('asd','xxx')
*/
static class Constraint {
- private RexLiteral literal;
- private RexInputRef inputRef;
+ private RexNode exprNode;
+ private RexNode constNode;
- public Constraint(RexInputRef inputRef, RexLiteral literal) {
- this.literal = literal;
- this.inputRef = inputRef;
+ public Constraint(RexNode exprNode, RexNode constNode) {
+ this.exprNode = exprNode;
+ this.constNode = constNode;
}
/**
@@ -223,21 +280,31 @@ public static Constraint of(RexNode n) {
}
RexNode opA = call.operands.get(0);
RexNode opB = call.operands.get(1);
- if (opA instanceof RexLiteral && opB instanceof RexInputRef) {
- RexLiteral rexLiteral = (RexLiteral) opA;
- RexInputRef rexInputRef = (RexInputRef) opB;
- return new Constraint(rexInputRef, rexLiteral);
+ if (RexUtil.isNull(opA) || RexUtil.isNull(opB)) {
+ // don't try to compare nulls
+ return null;
+ }
+ if (isConstExpr(opA) && isColumnExpr(opB)) {
+ return new Constraint(opB, opA);
}
- if (opA instanceof RexInputRef && opB instanceof RexLiteral) {
- RexLiteral rexLiteral = (RexLiteral) opB;
- RexInputRef rexInputRef = (RexInputRef) opA;
- return new Constraint(rexInputRef, rexLiteral);
+ if (isColumnExpr(opA) && isConstExpr(opB)) {
+ return new Constraint(opA, opB);
}
return null;
}
- public RexInputRef getKey() {
- return inputRef;
+ private static boolean isColumnExpr(RexNode node) {
+ return !node.getType().isStruct() && HiveCalciteUtil.getInputRefs(node).size() > 0
+ && HiveCalciteUtil.isDeterministic(node);
+ }
+
+ private static boolean isConstExpr(RexNode node) {
+ return !node.getType().isStruct() && HiveCalciteUtil.getInputRefs(node).size() == 0
+ && HiveCalciteUtil.isDeterministic(node);
+ }
+
+ public RexNodeRef getKey() {
+ return new RexNodeRef(exprNode);
}
}
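Note: a hedged test-style sketch of the widened matching — rexBuilder, colA, colB, and lit1 are assumed fixtures; SqlStdOperatorTable is Calcite's standard operator table:

    import org.apache.calcite.sql.fun.SqlStdOperatorTable;

    // Column-vs-constant comparisons yield a constraint keyed on the column side:
    RexNode eq = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, colA, lit1);
    Constraint c = Constraint.of(eq);
    assert c != null && c.getKey().equals(new RexNodeRef(colA));

    // Column-vs-column comparisons (and null operands) yield no constraint:
    assert Constraint.of(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, colA, colB)) == null;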
@@ -254,17 +321,17 @@ public RexInputRef getKey() {
*
*/
static class ConstraintGroup {
+ public static final Function<ConstraintGroup, Set<RexNodeRef>> KEY_FUNCTION =
+ new Function<ConstraintGroup, Set<RexNodeRef>>() {
- public static final Function<ConstraintGroup, Set<RexInputRef>> KEY_FUNCTION = new Function<ConstraintGroup, Set<RexInputRef>>() {
-
- @Override
- public Set<RexInputRef> apply(ConstraintGroup a) {
- return a.key;
- }
- };
- private Map<RexInputRef, Constraint> constraints = new HashMap<>();
+ @Override
+ public Set<RexNodeRef> apply(ConstraintGroup cg) {
+ return cg.key;
+ }
+ };
+ private Map<RexNodeRef, Constraint> constraints = new HashMap<>();
private RexNode originalRexNode;
- private final Set<RexInputRef> key;
+ private final Set<RexNodeRef> key;
public ConstraintGroup(RexNode rexNode) {
originalRexNode = rexNode;
@@ -289,21 +356,21 @@ public ConstraintGroup(RexNode rexNode) {
key = constraints.keySet();
}
- public List<RexNode> getValuesInOrder(List<RexInputRef> columns) throws SemanticException {
+ public List<RexNode> getValuesInOrder(List<RexNodeRef> columns) throws SemanticException {
List<RexNode> ret = new ArrayList<>();
- for (RexInputRef rexInputRef : columns) {
+ for (RexNodeRef rexInputRef : columns) {
Constraint constraint = constraints.get(rexInputRef);
if (constraint == null) {
throw new SemanticException("Unable to find constraint which was earlier added.");
}
- ret.add(constraint.literal);
+ ret.add(constraint.constNode);
}
return ret;
}
}
- private RexNode transformIntoInClauseCondition(RexBuilder rexBuilder, RelDataType inputSchema,
- RexNode condition, int minNumORClauses) throws SemanticException {
+ private RexNode transformIntoInClauseCondition(RexBuilder rexBuilder, RexNode condition,
+ int minNumORClauses) throws SemanticException {
assert condition.getKind() == SqlKind.OR;
ImmutableList<RexNode> operands = RexUtil.flattenOr(((RexCall) condition).getOperands());
@@ -318,10 +385,10 @@ private RexNode transformIntoInClauseCondition(RexBuilder rexBuilder, RelDataTyp
allNodes.add(m);
}
- Multimap<Set<RexInputRef>, ConstraintGroup> assignmentGroups =
+ Multimap<Set<RexNodeRef>, ConstraintGroup> assignmentGroups =
Multimaps.index(allNodes, ConstraintGroup.KEY_FUNCTION);
- for (Entry<Set<RexInputRef>, Collection<ConstraintGroup>> sa : assignmentGroups.asMap().entrySet()) {
+ for (Entry<Set<RexNodeRef>, Collection<ConstraintGroup>> sa : assignmentGroups.asMap().entrySet()) {
// skip opaque
if (sa.getKey().size() == 0) {
continue;
@@ -351,13 +418,15 @@ private RexNode transformIntoInClauseCondition(RexBuilder rexBuilder, RelDataTyp
}
- private RexNode buildInFor(Set<RexInputRef> set, Collection<ConstraintGroup> value) throws SemanticException {
+ private RexNode buildInFor(Set<RexNodeRef> set, Collection<ConstraintGroup> value) throws SemanticException {
- List<RexInputRef> columns = new ArrayList<RexInputRef>();
+ List<RexNodeRef> columns = new ArrayList<>();
columns.addAll(set);
+ columns.sort(RexNodeRef.COMPARATOR);
List<RexNode> operands = new ArrayList<>();
- operands.add(useStructIfNeeded(columns));
+ List<RexNode> columnNodes = columns.stream().map(n -> n.getRexNode()).collect(Collectors.toList());
+ operands.add(useStructIfNeeded(columnNodes));
for (ConstraintGroup node : value) {
List<RexNode> values = node.getValuesInOrder(columns);
operands.add(useStructIfNeeded(values));
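Note: the sort added above makes the emitted expression deterministic; without it, the iteration order of the key set could vary between runs and flip the struct's column order. A sketch (the printed form below is approximate):

    // Within buildInFor, for key columns {a, b} and groups {a=1, b=2} and
    // {a=3, b=4}, the emitted call is roughly:
    //   IN(struct(a, b), struct(1, 2), struct(3, 4))
    List<RexNodeRef> columns = new ArrayList<>(set);
    columns.sort(RexNodeRef.COMPARATOR);  // orders by RexNode.toString(),
                                          // pinning the column order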
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index c9df668d4a7..3a51d9795b0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -910,7 +910,7 @@
}
public static ExprNodeConstantDesc createDecimal(String strVal, boolean notNull) {
- // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
- // precision and scale of the value by throwing away trailing zeroes. This may or may
- // not be desirable for the literals; however, this used to be the default behavior
- // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
HiveDecimal hd = HiveDecimal.create(strVal);
if (notNull && hd == null) {
return null;
}
+ return new ExprNodeConstantDesc(adjustType(hd), hd);
+ }
+
+ private static DecimalTypeInfo adjustType(HiveDecimal hd) {
+ // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
+ // precision and scale of the value by throwing away trailing zeroes. This may or may
+ // not be desirable for the literals; however, this used to be the default behavior
+ // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
int prec = 1;
int scale = 0;
if (hd != null) {
@@ -368,7 +373,7 @@ public static ExprNodeConstantDesc createDecimal(String strVal, boolean notNull)
scale = hd.scale();
}
DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
- return new ExprNodeConstantDesc(typeInfo, hd);
+ return typeInfo;
}
}
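Note: a sketch of the literal types the extracted adjustType produces, following the normalization behavior described in the comment above (exact precision/scale follow from HiveDecimal dropping trailing zeroes):

    HiveDecimal one = HiveDecimal.create("1.0");   // normalized: trailing zero dropped
    // adjustType(one)  -> decimal(1, 0), not decimal(2, 1)
    HiveDecimal frac = HiveDecimal.create("1.25");
    // adjustType(frac) -> decimal(3, 2)
    // adjustType(null) -> decimal(1, 0), the defensive default (prec=1, scale=0)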
@@ -1163,36 +1168,59 @@ protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
ExprNodeDesc constChild = children.get(constIdx);
ExprNodeDesc columnChild = children.get(1 - constIdx);
- final PrimitiveTypeInfo colTypeInfo =
- TypeInfoFactory.getPrimitiveTypeInfo(columnChild.getTypeString().toLowerCase());
- ExprNodeDesc newChild = interpretNodeAs(colTypeInfo, constChild);
- if (newChild == null) {
- // non-interpretabe as that type...
- if (genericUDF instanceof GenericUDFOPEqual) {
- return new ExprNodeConstantDesc(false);
- }
- } else {
- children.set(constIdx, newChild);
+ final PrimitiveTypeInfo colTypeInfo =
+ TypeInfoFactory.getPrimitiveTypeInfo(columnChild.getTypeString().toLowerCase());
+ ExprNodeDesc newChild = interpretNodeAs(colTypeInfo, constChild);
+ if (newChild == null) {
+ // non-interpretable as target type...
+ // TODO: all comparisons with null should result in null
+ if (genericUDF instanceof GenericUDFOPEqual
+ && !(genericUDF instanceof GenericUDFOPEqualNS)) {
+ return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, null);
}
+ } else {
+ children.set(constIdx, newChild);
+ }
}
- if (genericUDF instanceof GenericUDFIn && children.get(0) instanceof ExprNodeColumnDesc) {
- ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) children.get(0);
- final PrimitiveTypeInfo colTypeInfo =
- TypeInfoFactory.getPrimitiveTypeInfo(columnDesc.getTypeString().toLowerCase());
+ if (genericUDF instanceof GenericUDFIn) {
+
+ ExprNodeDesc columnDesc = children.get(0);
List<ExprNodeDesc> outputOpList = children.subList(1, children.size());
ArrayList<ExprNodeDesc> inOperands = new ArrayList<>(outputOpList);
outputOpList.clear();
+ boolean hasNullValue = false;
for (ExprNodeDesc oldChild : inOperands) {
- if(oldChild !=null && oldChild instanceof ExprNodeConstantDesc) {
- ExprNodeDesc newChild = interpretNodeAs(colTypeInfo, oldChild);
- if(newChild == null) {
- // non interpretable as target type; skip
- continue;
+ if (oldChild == null) {
+ hasNullValue = true;
+ continue;
+ }
+ ExprNodeDesc newChild = interpretNodeAsStruct(columnDesc, oldChild);
+ if (newChild == null) {
+ hasNullValue = true;
+ continue;
+ }
+ outputOpList.add(newChild);
+ }
+
+ if (hasNullValue) {
+ ExprNodeConstantDesc nullConst = new ExprNodeConstantDesc(columnDesc.getTypeInfo(), null);
+ if (outputOpList.size() == 0) {
+ // we have found only null values... remove the IN; it will always be null.
+ return nullConst;
+ }
+ outputOpList.add(nullConst);
+ }
+ if (!ctx.isCBOExecuted()) {
+ ArrayList<ExprNodeDesc> orOperands = TypeCheckProcFactoryUtils.rewriteInToOR(children);
+ if (orOperands != null) {
+ if (orOperands.size() == 1) {
+ orOperands.add(new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, false));
}
- outputOpList.add(newChild);
- }else{
- outputOpList.add(oldChild);
+ funcText = "or";
+ genericUDF = new GenericUDFOPOr();
+ children.clear();
+ children.addAll(orOperands);
}
}
}
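Note: the null handling above implements SQL's three-valued logic for IN. A stand-in sketch, with Java's Boolean representing TRUE/FALSE/UNKNOWN:

    import java.util.List;

    class InNullSemanticsSketch {
      // a IN (v1, ..., vn [, null]) under three-valued logic:
      static Boolean inList(Integer a, List<Integer> values, boolean hasNullOperand) {
        if (a == null) {
          return null;                          // null lhs: UNKNOWN
        }
        if (values.contains(a)) {
          return true;                          // matched a non-null operand
        }
        return hasNullOperand ? null : false;   // a null operand turns FALSE into UNKNOWN
      }
    }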
@@ -1258,48 +1286,145 @@ protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
return desc;
}
- private ExprNodeDesc interpretNodeAs(PrimitiveTypeInfo colTypeInfo, ExprNodeDesc constChild) {
+ /**
+ * Interprets the given value as the type of columnDesc, if possible.
+ */
+ private static ExprNodeDesc interpretNodeAsStruct(ExprNodeDesc columnDesc, ExprNodeDesc valueDesc)
+ throws SemanticException {
+ if (columnDesc instanceof ExprNodeColumnDesc) {
+ ExprNodeColumnDesc exprNodeColumnDesc = (ExprNodeColumnDesc) columnDesc;
+ final PrimitiveTypeInfo typeInfo =
+ TypeInfoFactory.getPrimitiveTypeInfo(exprNodeColumnDesc.getTypeString().toLowerCase());
+ return interpretNodeAs(typeInfo, valueDesc);
+ }
+ if (ExprNodeDescUtils.isStructUDF(columnDesc) && ExprNodeDescUtils.isConstantStruct(valueDesc)) {
+ List<ExprNodeDesc> columnChilds = ((ExprNodeGenericFuncDesc) columnDesc).getChildren();
+ ExprNodeConstantDesc valueConstDesc = (ExprNodeConstantDesc) valueDesc;
+ StructTypeInfo structTypeInfo = (StructTypeInfo) valueConstDesc.getTypeInfo();
+ ArrayList<TypeInfo> structFieldInfos = structTypeInfo.getAllStructFieldTypeInfos();
+ ArrayList<TypeInfo> newStructFieldInfos = new ArrayList<>();
+
+ if (columnChilds.size() != structFieldInfos.size()) {
+ throw new SemanticException(ErrorMsg.INCOMPATIBLE_STRUCT.getMsg(columnChilds + " and " + structFieldInfos));
+ }
+ List