diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index a915a9b..604041f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -2091,7 +2091,7 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException
       if (obAST != null) {
         // 1. OB Expr sanity test
         // in strict mode, in the presence of order by, limit must be specified
-        Integer limit = qb.getParseInfo().getDestLimit(dest);
+        Long limit = qb.getParseInfo().getDestLimit(dest);
         if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")
             && limit == null) {
           throw new SemanticException(SemanticAnalyzer.generateErrorMessage(obAST,
@@ -2222,7 +2222,7 @@ public RexNode apply(RelDataTypeField input) {
     private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
       HiveRelNode sortRel = null;
       QBParseInfo qbp = getQBParseInfo(qb);
-      Integer limit = qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
+      Long limit = qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
 
       if (limit != null) {
         RexNode fetch = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(limit));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
index 14a7e9c..9da03cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
@@ -100,7 +100,7 @@
 
   /* Order by clause */
   private final HashMap<String, ASTNode> destToOrderby;
-  private final HashMap<String, Integer> destToLimit;
+  private final HashMap<String, Long> destToLimit;
   private int outerQueryLimit;
 
   // used by GroupBy
@@ -129,7 +129,7 @@ public QBParseInfo(String alias, boolean isSubQ) {
     destToDistributeby = new HashMap<String, ASTNode>();
     destToSortby = new HashMap<String, ASTNode>();
     destToOrderby = new HashMap<String, ASTNode>();
-    destToLimit = new HashMap<String, Integer>();
+    destToLimit = new HashMap<String, Long>();
     insertIntoTables = new HashSet<String>();
     destRollups = new HashSet<String>();
     destCubes = new HashSet<String>();
@@ -441,11 +441,11 @@ public void setExprToColumnAlias(ASTNode expr, String alias) {
     exprToColumnAlias.put(expr, alias);
   }
 
-  public void setDestLimit(String dest, Integer limit) {
+  public void setDestLimit(String dest, Long limit) {
     destToLimit.put(dest, limit);
   }
 
-  public Integer getDestLimit(String dest) {
+  public Long getDestLimit(String dest) {
     return destToLimit.get(dest);
   }
 
@@ -575,7 +575,7 @@ public TableSpec getTableSpec() {
     return tableSpecs.get(tName.next());
   }
 
-  public HashMap<String, Integer> getDestToLimit() {
+  public HashMap<String, Long> getDestToLimit() {
     return destToLimit;
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index a52f2f2..08c5d04 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -1330,7 +1330,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan
         break;
 
       case HiveParser.TOK_LIMIT:
-        qbp.setDestLimit(ctx_1.dest, new Integer(ast.getChild(0).getText()));
+        qbp.setDestLimit(ctx_1.dest, new Long(ast.getChild(0).getText()));
         break;
 
       case HiveParser.TOK_ANALYZE:
@@ -7177,7 +7177,7 @@ private Operator genReduceSinkPlan(String dest, QB qb, Operator input,
     if (sortExprs != null) {
       assert numReducers == 1;
       // in strict mode, in the presence of order by, limit must be specified
-      Integer limit = qb.getParseInfo().getDestLimit(dest);
+      Long limit = qb.getParseInfo().getDestLimit(dest);
       if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
           "strict")
           && limit == null) {
@@ -8954,7 +8954,7 @@ private Operator genPostGroupByBodyPlan(Operator curr, String dest, QB qb,
     }
 
     curr = genSelectPlan(dest, qb, curr, gbySource);
-    Integer limit = qbp.getDestLimit(dest);
+    Long limit = qbp.getDestLimit(dest);
 
     // Expressions are not supported currently without a alias.