Index: ql/src/test/results/clientnegative/groupby_key.q.out
===================================================================
--- ql/src/test/results/clientnegative/groupby_key.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/groupby_key.q.out	(revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:14 Expression Not In Group By Key value
Index: ql/src/test/queries/clientnegative/groupby_key.q
===================================================================
--- ql/src/test/queries/clientnegative/groupby_key.q	(revision 0)
+++ ql/src/test/queries/clientnegative/groupby_key.q	(revision 0)
@@ -0,0 +1 @@
+SELECT concat(value, concat(value)) FROM src GROUP BY concat(value);
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java	(revision 5071)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java	(working copy)
@@ -75,12 +75,12 @@
    * by operators key, so we substitute a+b in the select list with the internal
    * column name of the a+b expression that appears in the in input row
    * resolver.
-   * 
+   *
    * @param nd
    *          The node that is being inspected.
   * @param procCtx
    *          The processor context.
-   * 
+   *
    * @return exprNodeColumnDesc.
    */
   public static ExprNodeDesc processGByExpr(Node nd, Object procCtx)
@@ -137,7 +137,7 @@
 
   /**
    * Factory method to get NullExprProcessor.
-   * 
+   *
    * @return NullExprProcessor.
    */
   public static NullExprProcessor getNullExprProcessor() {
@@ -186,7 +186,7 @@
 
   /**
    * Factory method to get NumExprProcessor.
-   * 
+   *
    * @return NumExprProcessor.
    */
   public static NumExprProcessor getNumExprProcessor() {
@@ -236,7 +236,7 @@
 
   /**
    * Factory method to get StrExprProcessor.
-   * 
+   *
    * @return StrExprProcessor.
    */
   public static StrExprProcessor getStrExprProcessor() {
@@ -282,7 +282,7 @@
 
   /**
    * Factory method to get BoolExprProcessor.
-   * 
+   *
    * @return BoolExprProcessor.
    */
   public static BoolExprProcessor getBoolExprProcessor() {
@@ -312,7 +312,7 @@
       RowResolver input = ctx.getInputRR();
 
       if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) {
-        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr));
+        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr);
         return null;
       }
 
@@ -326,7 +326,7 @@
       if (isTableAlias) {
         if (colInfo != null) {
           // it's a table alias, and also a column
-          ctx.setError(ErrorMsg.AMBIGUOUS_TABLE_OR_COLUMN.getMsg(expr));
+          ctx.setError(ErrorMsg.AMBIGUOUS_TABLE_OR_COLUMN.getMsg(expr), expr);
           return null;
         } else {
           // It's a table alias.
@@ -337,11 +337,11 @@
       if (colInfo == null) {
         // It's not a column or a table alias.
         if (input.getIsExprResolver()) {
-          ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr));
+          ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr), expr);
           return null;
         } else {
           ctx.setError(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(expr
-              .getChild(0)));
+              .getChild(0)), expr);
           LOG.debug(ErrorMsg.INVALID_TABLE_OR_COLUMN.toString() + ":"
               + input.toString());
           return null;
@@ -360,7 +360,7 @@
 
   /**
    * Factory method to get ColumnExprProcessor.
-   * 
+   *
    * @return ColumnExprProcessor.
    */
   public static ColumnExprProcessor getColumnExprProcessor() {
@@ -455,7 +455,7 @@
 
   /**
    * Get the exprNodeDesc.
-   * 
+   *
    * @param name
   * @param children
   * @return The expression node descriptor
@@ -476,7 +476,7 @@
    * This function create an ExprNodeDesc for a UDF function given the
    * children (arguments). It will insert implicit type conversion functions
    * if necessary.
-   * 
+   *
    * @throws UDFArgumentException
    */
   public static ExprNodeDesc getFuncExprNodeDesc(String udfName,
@@ -636,16 +636,34 @@
       return desc;
     }
 
+    private boolean isDescendant(Node parent, Node des) {
+      if(parent.getChildren() == null) {
+        return false;
+      }
+      for (Node c : parent.getChildren()) {
+        if(c == des) {
+          return true;
+        }
+        if(isDescendant(c, des)) {
+          return true;
+        }
+      }
+      return false;
+    }
+
     @Override
     public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
 
       TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
 
-      // If this is a GroupBy expression, clear error and continue
       ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
       if (desc != null) {
-        ctx.setError(null);
+        // A descendant node likely produced an error because it's not a group by
+        // expression. We can safely clear it as desc overrides any children.
+        if(isDescendant(nd, ctx.getErrorSrcNode())) {
+          ctx.setError(null, null);
+        }
         return desc;
       }
 
@@ -670,7 +688,7 @@
             ((ExprNodeConstantDesc) nodeOutputs[1]).getValue().toString());
 
         if (colInfo == null) {
-          ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)));
+          ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
           return null;
         }
         return new ExprNodeColumnDesc(colInfo.getType(), colInfo
@@ -722,7 +740,7 @@
 
   /**
    * Factory method to get DefaultExprProcessor.
-   * 
+   *
    * @return DefaultExprProcessor.
    */
   public static DefaultExprProcessor getDefaultExprProcessor() {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java	(revision 5071)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java	(working copy)
@@ -43,8 +43,13 @@
   private String error;
 
   /**
+   * The node that generated the potential typecheck error
+   */
+  private ASTNode errorSrcNode;
+
+  /**
    * Constructor.
-   * 
+   *
    * @param inputRR
    *          The input row resolver of the previous operator.
    */
@@ -86,9 +91,11 @@
   /**
    * @param error
    *          the error to set
+   *
    */
-  public void setError(String error) {
+  public void setError(String error, ASTNode errorSrcNode) {
     this.error = error;
+    this.errorSrcNode = errorSrcNode;
   }
 
   /**
@@ -98,4 +105,8 @@
     return error;
   }
 
+  public ASTNode getErrorSrcNode() {
+    return errorSrcNode;
+  }
+
 }
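
For context, the sketch below shows the descendant check this patch adds to DefaultExprProcessor, in isolation. It is a simplified illustration, not part of the patch: SimpleNode is a hypothetical stand-in for org.apache.hadoop.hive.ql.lib.Node, and the main method mirrors the groupby_key.q test case, where resolving the group-by key concat(value) must only clear an error that originated inside that key's own subtree.

import java.util.Arrays;
import java.util.List;

// Standalone sketch of the isDescendant() logic added to DefaultExprProcessor.
// SimpleNode is a hypothetical stand-in for Hive's Node; only getChildren()
// is mirrored here.
public class DescendantCheckSketch {

  static class SimpleNode {
    private final String name;
    private final List<SimpleNode> children;

    SimpleNode(String name, SimpleNode... children) {
      this.name = name;
      this.children = Arrays.asList(children);
    }

    // Like Node.getChildren() in the patch: null when the node is a leaf.
    List<SimpleNode> getChildren() {
      return children.isEmpty() ? null : children;
    }

    @Override
    public String toString() {
      return name;
    }
  }

  // Same recursion as the patch: walk the subtree under parent and report
  // whether des occurs anywhere below it (compared by identity).
  static boolean isDescendant(SimpleNode parent, SimpleNode des) {
    if (parent.getChildren() == null) {
      return false;
    }
    for (SimpleNode c : parent.getChildren()) {
      if (c == des || isDescendant(c, des)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    // Shape of the test query: concat(value, concat(value)) with
    // GROUP BY concat(value). The inner concat(value) is the group-by key.
    SimpleNode innerConcat = new SimpleNode("concat", new SimpleNode("value"));
    SimpleNode outerConcat =
        new SimpleNode("concat", new SimpleNode("value"), innerConcat);

    // An error raised under the inner concat may be cleared once that
    // expression resolves to a group-by key...
    System.out.println(isDescendant(innerConcat, innerConcat.getChildren().get(0))); // true
    // ...but resolving the inner concat must not clear an error raised on the
    // outer expression, which is not a group-by key.
    System.out.println(isDescendant(innerConcat, outerConcat)); // false
  }
}

With the old unconditional ctx.setError(null), resolving the inner group-by key could wipe out the error recorded for the sibling value reference, letting the invalid query pass; tracking the error's source node makes the clear conditional on that descendant relationship.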