Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java	(revision 1504363)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java	(working copy)
@@ -80,22 +80,20 @@
   public void initializePTFChain(PartitionedTableFunctionDef tblFnDef) throws HiveException {
     Stack ptfChain = new Stack();
     PTFInputDef currentDef = tblFnDef;
-    while (currentDef != null ) {
+    while (currentDef != null) {
       ptfChain.push(currentDef);
       currentDef = currentDef.getInput();
     }
 
-    while ( !ptfChain.isEmpty() ) {
+    while (!ptfChain.isEmpty()) {
       currentDef = ptfChain.pop();
-      if ( currentDef instanceof PTFQueryInputDef) {
-        initialize((PTFQueryInputDef)currentDef, inputOI);
+      if (currentDef instanceof PTFQueryInputDef) {
+        initialize((PTFQueryInputDef) currentDef, inputOI);
+      } else if (currentDef instanceof WindowTableFunctionDef) {
+        initializeWindowing((WindowTableFunctionDef) currentDef);
+      } else {
+        initialize((PartitionedTableFunctionDef) currentDef);
       }
-      else if ( currentDef instanceof WindowTableFunctionDef) {
-        initializeWindowing((WindowTableFunctionDef)currentDef);
-      }
-      else {
-        initialize((PartitionedTableFunctionDef)currentDef);
-      }
     }
   }
 
@@ -114,16 +112,16 @@
     /*
      * 2. initialize WFns.
      */
-    if ( def.getWindowFunctions() != null ) {
-      for(WindowFunctionDef wFnDef : def.getWindowFunctions() ) {
+    if (def.getWindowFunctions() != null) {
+      for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
 
-        if ( wFnDef.getArgs() != null ) {
-          for(PTFExpressionDef arg : wFnDef.getArgs()) {
+        if (wFnDef.getArgs() != null) {
+          for (PTFExpressionDef arg : wFnDef.getArgs()) {
             initialize(arg, inpShape);
           }
         }
 
-        if ( wFnDef.getWindowFrame() != null ) {
+        if (wFnDef.getWindowFrame() != null) {
           WindowFrameDef wFrmDef = wFnDef.getWindowFrame();
           initialize(wFrmDef.getStart(), inpShape);
           initialize(wFrmDef.getEnd(), inpShape);
@@ -132,10 +130,10 @@
       }
       ArrayList aliases = new ArrayList();
       ArrayList fieldOIs = new ArrayList();
-      for(WindowFunctionDef wFnDef : def.getWindowFunctions()) {
+      for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
         aliases.add(wFnDef.getAlias());
-        if ( wFnDef.isPivotResult() ) {
-          fieldOIs.add(((ListObjectInspector)wFnDef.getOI()).getListElementObjectInspector());
+        if (wFnDef.isPivotResult()) {
+          fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
         } else {
           fieldOIs.add(wFnDef.getOI());
         }
@@ -145,8 +143,7 @@
           aliases, fieldOIs);
       tResolver.setWdwProcessingOutputOI(wdwOutOI);
       initialize(def.getOutputFromWdwFnProcessing(), wdwOutOI);
-    }
-    else {
+    } else {
       def.setOutputFromWdwFnProcessing(inpShape);
     }
 
@@ -155,8 +152,8 @@
     /*
      * 3. initialize WExprs. + having clause
      */
-    if ( def.getWindowExpressions() != null ) {
-      for(WindowExpressionDef wEDef : def.getWindowExpressions()) {
+    if (def.getWindowExpressions() != null) {
+      for (WindowExpressionDef wEDef : def.getWindowExpressions()) {
         initialize(wEDef, inpShape);
       }
     }
@@ -171,7 +168,7 @@
      * If we have windowExpressions then we convert to Std. Object to process;
      * we just stream these rows; no need to put in an output Partition.
      */
-    if ( def.getWindowExpressions().size() > 0 ) {
+    if (def.getWindowExpressions().size() > 0) {
       StructObjectInspector oi = (StructObjectInspector)
           ObjectInspectorUtils.getStandardObjectInspector(def.getOutputShape().getOI());
       def.getOutputShape().setOI(oi);
@@ -189,8 +186,8 @@
     /*
      * 1. initialize args
      */
-    if (def.getArgs() != null ) {
-      for(PTFExpressionDef arg : def.getArgs()) {
+    if (def.getArgs() != null) {
+      for (PTFExpressionDef arg : def.getArgs()) {
         initialize(arg, inpShape);
       }
     }
@@ -199,19 +196,17 @@
      * 2. setup resolve, make connections
      */
     TableFunctionEvaluator tEval = def.getTFunction();
-    //TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
+    // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
     TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
     tResolver.initialize(ptfDesc, def, tEval);
 
     /*
      * 3. give Evaluator chance to setup for RawInput execution; setup RawInput shape
      */
-    if (tEval.isTransformsRawInput())
-    {
+    if (tEval.isTransformsRawInput()) {
       tResolver.initializeRawInputOI();
       initialize(def.getRawInputShape(), tEval.getRawInputOI());
-    }
-    else {
+    } else {
       def.setRawInputShape(inpShape);
     }
 
@@ -224,8 +219,7 @@
     initialize(def.getOutputShape(), tEval.getOutputOI());
   }
 
-  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException
-  {
+  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException {
     ArrayList args = def.getArgs();
     ArrayList argOIs = new ArrayList();
     ObjectInspector[] funcArgOIs = null;
@@ -245,7 +239,7 @@
   }
 
   protected void initialize(BoundaryDef def, ShapeDetails inpShape) throws HiveException {
-    if ( def instanceof ValueBoundaryDef ) {
+    if (def instanceof ValueBoundaryDef) {
       ValueBoundaryDef vDef = (ValueBoundaryDef) def;
       initialize(vDef.getExpressionDef(), inpShape);
     }
@@ -262,8 +256,7 @@
 
   private ObjectInspector initExprNodeEvaluator(ExprNodeEvaluator exprEval,
       ExprNodeDesc exprNode, ShapeDetails inpShape)
-      throws HiveException
-  {
+      throws HiveException {
     ObjectInspector outOI;
     outOI = exprEval.initialize(inpShape.getOI());
 
@@ -274,10 +267,8 @@
      * evaluator on the LLUDF instance.
      */
     List llFuncExprs = llInfo.getLLFuncExprsInTopExpr(exprNode);
-    if (llFuncExprs != null)
-    {
-      for (ExprNodeGenericFuncDesc llFuncExpr : llFuncExprs)
-      {
+    if (llFuncExprs != null) {
+      for (ExprNodeGenericFuncDesc llFuncExpr : llFuncExprs) {
         ExprNodeDesc firstArg = llFuncExpr.getChildren().get(0);
         ExprNodeEvaluator dupExprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, firstArg);
         dupExprEval.initialize(inpShape.getOI());
@@ -302,8 +293,7 @@
       serDe.initialize(hConf, serDeProps);
       shp.setSerde(serDe);
       shp.setOI((StructObjectInspector) serDe.getObjectInspector());
-    }
-    catch (SerDeException se)
+    } catch (SerDeException se)
     {
       throw new HiveException(se);
     }
@@ -324,19 +314,18 @@
     try {
       @SuppressWarnings("unchecked")
      Class rCls = (Class)
-        Class.forName(className);
+          Class.forName(className);
       return (TableFunctionResolver) ReflectionUtils.newInstance(rCls, null);
-    }
-    catch(Exception e) {
+    } catch (Exception e) {
       throw new HiveException(e);
     }
   }
 
   @SuppressWarnings({"unchecked"})
   public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
-    Map serdePropsMap) {
+      Map serdePropsMap) {
 
-    if ( serdePropsMap == null ) {
+    if (serdePropsMap == null) {
       return;
     }
@@ -368,7 +357,7 @@
     ArrayList fnames = t.getAllStructFieldNames();
     ArrayList fields = t.getAllStructFieldTypeInfos();
     return new ArrayList[]
-    { fnames, fields };
+    {fnames, fields};
   }
 
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java	(revision 1504363)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java	(working copy)
@@ -75,6 +75,7 @@
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFrameDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
@@ -128,18 +129,19 @@
       UnparseTranslator unparseT)
       throws SemanticException {
     init(semAly, hCfg, inputRR, unparseT);
-    this.ptfInvocation = qSpec;
+    ptfInvocation = qSpec;
     ptfDesc = new PTFDesc();
     ptfDesc.setLlInfo(llInfo);
     translatePTFChain();
     return ptfDesc;
   }
 
-  public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg, RowResolver inputRR,
+  public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg,
+      RowResolver inputRR,
       UnparseTranslator unparseT)
       throws SemanticException {
     init(semAly, hCfg, inputRR, unparseT);
-    this.windowingSpec = wdwSpec;
+    windowingSpec = wdwSpec;
     ptfDesc = new PTFDesc();
     ptfDesc.setLlInfo(llInfo);
     WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
@@ -153,12 +155,12 @@
     WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver)
         FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
     if (tFn == null) {
-      throw new SemanticException(String.format("INternal Error: Unknown Table Function %s",
+      throw new SemanticException(String.format("Internal Error: Unknown Table Function %s",
           FunctionRegistry.WINDOWING_TABLE_FUNCTION));
     }
     wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
     wdwTFnDef.setResolverClassName(tFn.getClass().getName());
-    wdwTFnDef.setAlias("ptf_" + 1 );
+    wdwTFnDef.setAlias("ptf_" + 1);
     wdwTFnDef.setExpressionTreeString(null);
     wdwTFnDef.setTransformsRawInput(false);
     tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
@@ -168,7 +170,7 @@
     wdwTFnDef.setRawInputShape(inpShape);
 
     PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
-    if ( partiSpec == null ) {
+    if (partiSpec == null) {
       throw new SemanticException(
           "Invalid use of Windowing: there is no Partitioning associated with Windowing");
     }
@@ -182,10 +184,10 @@
      * process Wdw functions
      */
     ArrayList windowFunctions = new ArrayList();
-    if ( wdwSpec.getWindowExpressions() != null ) {
-      for(WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
-        if ( expr instanceof WindowFunctionSpec) {
-          WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec)expr);
+    if (wdwSpec.getWindowExpressions() != null) {
+      for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
+        if (expr instanceof WindowFunctionSpec) {
+          WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
           windowFunctions.add(wFnDef);
         }
       }
@@ -195,13 +197,13 @@
     /*
      * set outputFromWdwFnProcessing
      */
-    if ( windowFunctions.size() > 0 ) {
+    if (windowFunctions.size() > 0) {
       ArrayList aliases = new ArrayList();
       ArrayList fieldOIs = new ArrayList();
-      for(WindowFunctionDef wFnDef : windowFunctions) {
+      for (WindowFunctionDef wFnDef : windowFunctions) {
         aliases.add(wFnDef.getAlias());
-        if ( wFnDef.isPivotResult() ) {
-          fieldOIs.add(((ListObjectInspector)wFnDef.getOI()).getListElementObjectInspector());
+        if (wFnDef.isPivotResult()) {
+          fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
         } else {
           fieldOIs.add(wFnDef.getOI());
         }
@@ -223,16 +225,15 @@
      */
     ShapeDetails wdwOutShape = wdwTFnDef.getOutputFromWdwFnProcessing();
     ArrayList windowExpressions = new ArrayList();
-    if ( wdwSpec.getWindowExpressions() != null ) {
-      for(WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
-        if ( !(expr instanceof WindowFunctionSpec) ) {
+    if (wdwSpec.getWindowExpressions() != null) {
+      for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
+        if (!(expr instanceof WindowFunctionSpec)) {
           try {
             PTFExpressionDef eDef = buildExpressionDef(wdwOutShape, expr.getExpression());
             WindowExpressionDef wdwEDef = new WindowExpressionDef(eDef);
             wdwEDef.setAlias(expr.getAlias());
             windowExpressions.add(wdwEDef);
-          }
-          catch(HiveException he) {
+          } catch (HiveException he) {
             throw new SemanticException(he);
           }
         }
@@ -243,10 +244,10 @@
     /*
      * set outputOI
      */
-    if ( windowExpressions.size() > 0 ) {
+    if (windowExpressions.size() > 0) {
       ArrayList aliases = new ArrayList();
       ArrayList fieldOIs = new ArrayList();
-      for(WindowExpressionDef wEDef : windowExpressions) {
+      for (WindowExpressionDef wEDef : windowExpressions) {
         aliases.add(wEDef.getAlias());
         fieldOIs.add(wEDef.getOI());
       }
@@ -267,7 +268,7 @@
      * If we have windowExpressions then we convert to Std. Object to process;
      * we just stream these rows; no need to put in an output Partition.
      */
-    if ( windowExpressions.size() > 0 ) {
+    if (windowExpressions.size() > 0) {
       StructObjectInspector oi = (StructObjectInspector)
           ObjectInspectorUtils.getStandardObjectInspector(wdwTFnDef.getOutputShape().getOI());
       wdwTFnDef.getOutputShape().setOI(oi);
@@ -280,17 +281,17 @@
     Stack ptfChain = new Stack();
     PTFInputSpec currentSpec = ptfInvocation.getFunction();
-    while (currentSpec != null ) {
+    while (currentSpec != null) {
       ptfChain.push(currentSpec);
       currentSpec = currentSpec.getInput();
     }
 
     int inputNum = 0;
     PTFInputDef currentDef = null;
-    while ( !ptfChain.isEmpty() ) {
+    while (!ptfChain.isEmpty()) {
       currentSpec = ptfChain.pop();
-      if ( currentSpec instanceof PTFQueryInputSpec) {
+      if (currentSpec instanceof PTFQueryInputSpec) {
         currentDef = translate((PTFQueryInputSpec) currentSpec, inputNum);
       }
       else {
@@ -300,7 +301,7 @@
       }
       inputNum++;
     }
-    ptfDesc.setFuncDef((PartitionedTableFunctionDef)currentDef);
+    ptfDesc.setFuncDef((PartitionedTableFunctionDef) currentDef);
   }
 
   private PTFQueryInputDef translate(PTFQueryInputSpec spec,
@@ -342,8 +343,7 @@
       PTFExpressionDef argDef = null;
       try {
         argDef = buildExpressionDef(inpDef.getOutputShape(), expr);
-      }
-      catch(HiveException he) {
+      } catch (HiveException he) {
         throw new SemanticException(he);
       }
       def.addArg(argDef);
@@ -356,7 +356,7 @@
     def.setCarryForwardNames(tFn.carryForwardNames());
     tFn.setupRawInputOI();
 
-    if ( tFn.transformsRawInput() ) {
+    if (tFn.transformsRawInput()) {
       StructObjectInspector rawInOutOI = tEval.getRawInputOI();
       ArrayList rawInOutColNames = tFn.getRawInputColumnNames();
       RowResolver rawInRR = buildRowResolverForPTF(def.getName(),
@@ -418,8 +418,7 @@
       PTFExpressionDef argDef = null;
       try {
         argDef = buildExpressionDef(inpShape, expr);
-      }
-      catch(HiveException he) {
+      } catch (HiveException he) {
         throw new SemanticException(he);
       }
       def.addArg(argDef);
@@ -432,24 +431,23 @@
     }
 
     WindowSpec wdwSpec = spec.getWindowSpec();
-    if ( wdwSpec != null ) {
+    if (wdwSpec != null) {
       String desc = spec.toString();
 
       WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
-      if (!wFnInfo.isSupportsWindow() )
+      if (!wFnInfo.isSupportsWindow())
       {
         BoundarySpec start = wdwSpec.getWindowFrame().getStart();
-        if ( start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT ) {
+        if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
           throw new SemanticException(
-            String.format("Expecting left window frame boundary for " +
-              "function %s to be unbounded. Found : %d", desc, start.getAmt())
-            );
+              String.format("Expecting left window frame boundary for " +
+                  "function %s to be unbounded. Found : %d", desc, start.getAmt()));
         }
         BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
-        if ( end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT ) {
+        if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
           throw new SemanticException(
             String.format("Expecting right window frame boundary for " +
-              "function %s to be unbounded. Found : %d", desc, start.getAmt()));
+                "function %s to be unbounded. Found : %d", desc, start.getAmt()));
         }
       }
       def.setWindowFrame(wdwFrame);
     }
@@ -457,8 +455,7 @@
     try {
       setupWdwFnEvaluator(def);
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
       throw new SemanticException(he);
     }
@@ -470,7 +467,7 @@
       throws SemanticException {
     applyConstantPartition(spec);
-    if ( spec.getPartition() == null ) {
+    if (spec.getPartition() == null) {
       return;
     }
     PartitionDef partDef = translate(def.getRawInputShape(), spec.getPartition());
@@ -483,30 +480,28 @@
    * If this the first PPTF in the chain and there is no partition specified
    * then assume the user wants to include the entire input in 1 partition.
    */
-  private static void applyConstantPartition( PartitionedTableFunctionSpec spec) {
-    if ( spec.getPartition() != null ) {
+  private static void applyConstantPartition(PartitionedTableFunctionSpec spec) {
+    if (spec.getPartition() != null) {
       return;
     }
     PTFInputSpec iSpec = spec.getInput();
-    if ( iSpec instanceof PTFInputSpec ) {
-        PartitionSpec partSpec = new PartitionSpec();
-        PartitionExpression partExpr = new PartitionExpression();
-        partExpr.setExpression(new ASTNode(new CommonToken(HiveParser.Number, "0")));
-        partSpec.addExpression(partExpr);
-        spec.setPartition(partSpec);
+    if (iSpec instanceof PTFInputSpec) {
+      PartitionSpec partSpec = new PartitionSpec();
+      PartitionExpression partExpr = new PartitionExpression();
+      partExpr.setExpression(new ASTNode(new CommonToken(HiveParser.Number, "0")));
+      partSpec.addExpression(partExpr);
+      spec.setPartition(partSpec);
     }
   }
 
   private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec)
-      throws SemanticException
-  {
+      throws SemanticException {
     if (spec == null || spec.getExpressions() == null || spec.getExpressions().size() == 0) {
       return null;
     }
     PartitionDef pDef = new PartitionDef();
-    for (PartitionExpression pExpr : spec.getExpressions())
-    {
+    for (PartitionExpression pExpr : spec.getExpressions()) {
       PTFExpressionDef expDef = translate(inpShape, pExpr);
       pDef.addExpression(expDef);
     }
@@ -514,17 +509,16 @@
   }
 
   private PTFExpressionDef translate(ShapeDetails inpShape,
-      PartitionExpression pExpr) throws SemanticException
-  {
+      PartitionExpression pExpr) throws SemanticException {
     PTFExpressionDef expDef = null;
     try {
       expDef = buildExpressionDef(inpShape, pExpr.getExpression());
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
      throw new SemanticException(he);
     }
     PTFTranslator.validateComparable(expDef.getOI(),
-        String.format("Partition Expression %s is not a comparable expression", pExpr.getExpression().toStringTree()));
+        String.format("Partition Expression %s is not a comparable expression", pExpr
+            .getExpression().toStringTree()));
     return expDef;
   }
@@ -548,8 +542,7 @@
 
   private OrderExpressionDef translate(ShapeDetails inpShape,
       OrderExpression oExpr)
-      throws SemanticException
-  {
+      throws SemanticException {
     OrderExpressionDef oexpDef = new OrderExpressionDef();
     oexpDef.setOrder(oExpr.getOrder());
     try {
@@ -558,8 +551,7 @@
       oexpDef.setExprEvaluator(expDef.getExprEvaluator());
       oexpDef.setExprNode(expDef.getExprNode());
       oexpDef.setOI(expDef.getOI());
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
       throw new SemanticException(he);
     }
     PTFTranslator.validateComparable(oexpDef.getOI(),
@@ -573,23 +565,21 @@
     /*
      * Since we componentize Windowing, no need to translate
      * the Partition & Order specs of individual WFns.
-    */
+     */
     return translate(inpShape, spec.getWindowFrame());
   }
 
   private WindowFrameDef translate(ShapeDetails inpShape,
       WindowFrameSpec spec)
       throws SemanticException {
-    if (spec == null)
-    {
+    if (spec == null) {
       return null;
     }
     BoundarySpec s = spec.getStart();
     BoundarySpec e = spec.getEnd();
     int cmp = s.compareTo(e);
-    if (cmp > 0)
-    {
+    if (cmp > 0) {
       throw new SemanticException(String.format(
           "Window range invalid, start boundary is greater than end boundary: %s", spec));
     }
@@ -602,8 +592,7 @@
 
   private BoundaryDef translate(ShapeDetails inpShape, BoundarySpec bndSpec)
       throws SemanticException {
-    if (bndSpec instanceof ValueBoundarySpec)
-    {
+    if (bndSpec instanceof ValueBoundarySpec) {
       ValueBoundarySpec vBndSpec = (ValueBoundarySpec) bndSpec;
       ValueBoundaryDef vbDef = new ValueBoundaryDef();
       vbDef.setAmt(vBndSpec.getAmt());
@@ -612,32 +601,27 @@
       PTFExpressionDef exprDef = null;
       try {
         exprDef = buildExpressionDef(inpShape, vBndSpec.getExpression());
-      }
-      catch(HiveException he) {
+      } catch (HiveException he) {
        throw new SemanticException(he);
      }
       PTFTranslator.validateValueBoundaryExprType(exprDef.getOI());
       vbDef.setExpressionDef(exprDef);
       return vbDef;
     }
-    else if (bndSpec instanceof RangeBoundarySpec)
-    {
+    else if (bndSpec instanceof RangeBoundarySpec) {
       RangeBoundarySpec rBndSpec = (RangeBoundarySpec) bndSpec;
       RangeBoundaryDef rbDef = new RangeBoundaryDef();
       rbDef.setAmt(rBndSpec.getAmt());
       rbDef.setDirection(rBndSpec.getDirection());
       return rbDef;
-    }
-    else if (bndSpec instanceof CurrentRowSpec)
-    {
+    } else if (bndSpec instanceof CurrentRowSpec) {
       CurrentRowDef cbDef = new CurrentRowDef();
       return cbDef;
     }
     throw new SemanticException("Unknown Boundary: " + bndSpec);
   }
 
-  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException
-  {
+  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException {
     ArrayList args = def.getArgs();
     ArrayList argOIs = new ArrayList();
     ObjectInspector[] funcArgOIs = null;
@@ -650,7 +634,8 @@
       funcArgOIs = argOIs.toArray(funcArgOIs);
     }
 
-    GenericUDAFEvaluator wFnEval = FunctionRegistry.getGenericWindowingEvaluator(def.getName(), argOIs,
+    GenericUDAFEvaluator wFnEval = FunctionRegistry.getGenericWindowingEvaluator(def.getName(),
+        argOIs,
         def.isDistinct(), def.isStar());
     ObjectInspector OI = wFnEval.init(GenericUDAFEvaluator.Mode.COMPLETE, funcArgOIs);
     def.setWFnEval(wFnEval);
@@ -658,10 +643,8 @@
   }
 
   private static void validateValueBoundaryExprType(ObjectInspector OI)
-      throws SemanticException
-  {
-    if (!OI.getCategory().equals(Category.PRIMITIVE))
-    {
+      throws SemanticException {
+    if (!OI.getCategory().equals(Category.PRIMITIVE)) {
       throw new SemanticException(
           String.format(
               "Value Boundary expression must be of primitve type. Found: %s",
@@ -691,12 +674,12 @@
 
   }
 
-  private ShapeDetails setupTableFnShape(String fnName, ShapeDetails inpShape, StructObjectInspector OI, ArrayList columnNames, RowResolver rr)
+  private ShapeDetails setupTableFnShape(String fnName, ShapeDetails inpShape,
+      StructObjectInspector OI, ArrayList columnNames, RowResolver rr)
       throws SemanticException {
     if (fnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION) || fnName.equals(
-        FunctionRegistry.NOOP_MAP_TABLE_FUNCTION))
-    {
+        FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
       return setupShapeForNoop(inpShape, OI, columnNames, rr);
     }
     return setupShape(OI, columnNames, rr);
   }
@@ -711,9 +694,8 @@
 
     try {
       serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
-      shp.setOI((StructObjectInspector)serde.getObjectInspector());
-    }
-    catch(SerDeException se) {
+      shp.setOI((StructObjectInspector) serde.getObjectInspector());
+    } catch (SerDeException se) {
       throw new SemanticException(se);
     }
@@ -781,22 +763,20 @@
     if (numOfPartColumns != 0 && numOfPartColumns != partCols.size()) {
       List partitionColumnNames = new ArrayList();
-      for(PartitionExpression partitionExpression : partCols) {
+      for (PartitionExpression partitionExpression : partCols) {
         ASTNode column = partitionExpression.getExpression();
-        if(column != null && column.getChildCount() > 0) {
+        if (column != null && column.getChildCount() > 0) {
           partitionColumnNames.add(column.getChild(0).getText());
         }
       }
       throw new SemanticException(
           String.format(
-          "all partition columns %s must be in order clause or none should be specified",
-          partitionColumnNames.toString()));
+              "all partition columns %s must be in order clause or none should be specified",
+              partitionColumnNames.toString()));
     }
     ArrayList combinedOrdExprs = new ArrayList();
-    if (numOfPartColumns == 0)
-    {
-      for (PartitionExpression partCol : partCols)
-      {
+    if (numOfPartColumns == 0) {
+      for (PartitionExpression partCol : partCols) {
        OrderExpression orderCol = new OrderExpression(partCol);
        combinedOrdExprs.add(orderCol);
      }
@@ -811,8 +791,7 @@
    */
   protected static final ArrayList RANKING_FUNCS = new ArrayList();
-  static
-  {
+  static {
     RANKING_FUNCS.add("rank");
     RANKING_FUNCS.add("dense_rank");
     RANKING_FUNCS.add("percent_rank");
@@ -822,17 +801,13 @@
 
   private void setupRankingArgs(WindowTableFunctionDef wdwTFnDef, WindowFunctionDef wFnDef,
       WindowFunctionSpec wSpec)
-      throws SemanticException
-  {
-    if (wSpec.getArgs().size() > 0)
-    {
+      throws SemanticException {
+    if (wSpec.getArgs().size() > 0) {
       throw new SemanticException("Ranking Functions can take no arguments");
     }
-
     OrderDef oDef = wdwTFnDef.getOrder();
     ArrayList oExprs = oDef.getExpressions();
-    for (OrderExpressionDef oExpr : oExprs)
-    {
+    for (OrderExpressionDef oExpr : oExprs) {
       wFnDef.addArg(oExpr);
     }
   }
@@ -841,8 +816,7 @@
    * Expr translation helper methods
    */
   public PTFExpressionDef buildExpressionDef(ShapeDetails inpShape, ASTNode arg)
-      throws HiveException
-  {
+      throws HiveException {
     PTFExpressionDef argDef = new PTFExpressionDef();
 
     ExprNodeDesc exprNode = semAly.genExprNodeDesc(arg, inpShape.getRr(),
@@ -860,8 +834,7 @@
 
   private ObjectInspector initExprNodeEvaluator(ExprNodeEvaluator exprEval,
       ExprNodeDesc exprNode, ShapeDetails inpShape)
-      throws HiveException
-  {
+      throws HiveException {
     ObjectInspector outOI;
     outOI = exprEval.initialize(inpShape.getOI());
@@ -892,10 +865,10 @@
    */
   protected static SerDe createLazyBinarySerDe(Configuration cfg,
-    StructObjectInspector oi, Map serdePropsMap) throws SerDeException {
+      StructObjectInspector oi, Map serdePropsMap) throws SerDeException {
 
     serdePropsMap = serdePropsMap == null ? new LinkedHashMap() : serdePropsMap;
-    addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
+    PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
 
     SerDe serDe = new LazyBinarySerDe();
     Properties p = new Properties();
@@ -909,48 +882,21 @@
   }
 
   @SuppressWarnings({"unchecked"})
-  public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
-      Map serdePropsMap) {
-    if ( serdePropsMap == null ) {
-      return;
-    }
-
-    ArrayList[] tInfo = getTypeMap(OI);
-
-    ArrayList columnNames = (ArrayList) tInfo[0];
-    ArrayList fields = (ArrayList) tInfo[1];
-    StringBuilder cNames = new StringBuilder();
-    StringBuilder cTypes = new StringBuilder();
-
-    for (int i = 0; i < fields.size(); i++)
-    {
-      cNames.append(i > 0 ? "," : "");
-      cTypes.append(i > 0 ? "," : "");
-      cNames.append(columnNames.get(i));
-      cTypes.append(fields.get(i).getTypeName());
-    }
-
-    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
-        cNames.toString());
-    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
-        cTypes.toString());
-  }
-
   private static ArrayList[] getTypeMap(
       StructObjectInspector oi) {
     StructTypeInfo t = (StructTypeInfo) TypeInfoUtils
         .getTypeInfoFromObjectInspector(oi);
     ArrayList fnames = t.getAllStructFieldNames();
     ArrayList fields = t.getAllStructFieldTypeInfos();
-    return new ArrayList[]
-    { fnames, fields };
+    return new ArrayList[] {fnames, fields};
   }
 
   /**
    * For each column on the input RR, construct a StructField for it
    * OI is constructed using the list of input column names and
    * their corresponding OIs.
+   *
    * @param rr
    * @return
    */
@@ -972,20 +918,16 @@
   }
 
   protected static void validateComparable(ObjectInspector OI, String errMsg)
-      throws SemanticException
-  {
-    if (!ObjectInspectorUtils.compareSupported(OI))
-    {
+      throws SemanticException {
+    if (!ObjectInspectorUtils.compareSupported(OI)) {
       throw new SemanticException(errMsg);
     }
   }
 
   private static void addInputColumnsToList(ShapeDetails shape,
-      ArrayList fieldNames, ArrayList fieldOIs)
-  {
+      ArrayList fieldNames, ArrayList fieldOIs) {
     StructObjectInspector OI = shape.getOI();
-    for (StructField f : OI.getAllStructFieldRefs())
-    {
+    for (StructField f : OI.getAllStructFieldRefs()) {
       fieldNames.add(f.getFieldName());
       fieldOIs.add(f.getFieldObjectInspector());
     }
@@ -999,15 +941,14 @@
       StructObjectInspector rowObjectInspector,
       ArrayList outputColNames, RowResolver inputRR) throws SemanticException {
-    if ( tbFnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION) ||
-        tbFnName.equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION) ) {
+    if (tbFnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION) ||
+        tbFnName.equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
       return buildRowResolverForNoop(tabAlias, rowObjectInspector, inputRR);
     }
     RowResolver rwsch = new RowResolver();
     List fields = rowObjectInspector.getAllStructFieldRefs();
-    for (int i = 0; i < fields.size(); i++)
-    {
+    for (int i = 0; i < fields.size(); i++) {
       ColumnInfo colInfo = new ColumnInfo(fields.get(i).getFieldName(),
           TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
               .getFieldObjectInspector()),
@@ -1019,14 +960,14 @@
   }
 
   protected RowResolver buildRowResolverForWindowing(WindowTableFunctionDef def,
-    boolean addWdwExprs) throws SemanticException {
+      boolean addWdwExprs) throws SemanticException {
     RowResolver rr = new RowResolver();
     HashMap aliasToExprMap = windowingSpec.getAliasToWdwExpr();
     /*
      * add Window Expressions
      */
-    if ( addWdwExprs ) {
-      for(WindowExpressionDef wEDef : def.getWindowExpressions() ) {
+    if (addWdwExprs) {
+      for (WindowExpressionDef wEDef : def.getWindowExpressions()) {
         ASTNode ast = aliasToExprMap.get(wEDef.getAlias()).getExpression();
         ColumnInfo cInfo = new ColumnInfo(wEDef.getAlias(),
             TypeInfoUtils.getTypeInfoFromObjectInspector(wEDef.getOI()),
@@ -1039,11 +980,11 @@
     /*
      * add Window Functions
      */
-    for(WindowFunctionDef wFnDef : def.getWindowFunctions() ) {
+    for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
       ASTNode ast = aliasToExprMap.get(wFnDef.getAlias()).getExpression();
       ObjectInspector wFnOI = null;
-      if ( wFnDef.isPivotResult() ) {
-        wFnOI = ((ListObjectInspector)wFnDef.getOI()).getListElementObjectInspector();
+      if (wFnDef.isPivotResult()) {
+        wFnOI = ((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector();
       }
       else {
         wFnOI = wFnDef.getOI();
@@ -1068,10 +1009,9 @@
       }
       ASTNode inExpr = null;
       inExpr = PTFTranslator.getASTNode(inpCInfo, inpRR);
-      if ( inExpr != null ) {
+      if (inExpr != null) {
         rr.putExpression(inExpr, cInfo);
-      }
-      else {
+      } else {
         rr.put(cInfo.getTabAlias(), colAlias, cInfo);
       }
     }
@@ -1081,13 +1021,11 @@
 
   protected static RowResolver buildRowResolverForNoop(String tabAlias,
       StructObjectInspector rowObjectInspector,
-      RowResolver inputRowResolver) throws SemanticException
-  {
+      RowResolver inputRowResolver) throws SemanticException {
     LOG.info("QueryTranslationInfo::getRowResolver invoked on ObjectInspector");
     RowResolver rwsch = new RowResolver();
     List fields = rowObjectInspector.getAllStructFieldRefs();
-    for (int i = 0; i < fields.size(); i++)
-    {
+    for (int i = 0; i < fields.size(); i++) {
       StructField field = fields.get(i);
       String internalName = field.getFieldName();
       String[] tabColAlias = inputRowResolver == null ? null : inputRowResolver
@@ -1099,8 +1037,7 @@
 
       if (tabColAlias != null) {
         inpColInfo = inputRowResolver.get(colTabAlias, colAlias);
-      }
-      else {
+      } else {
         /*
          * for the Virtual columns:
          * - the internalName is UPPER Case and the alias is lower case
@@ -1115,8 +1052,7 @@
       if (inpColInfo != null) {
         colInfo = new ColumnInfo(inpColInfo);
         colInfo.setTabAlias(tabAlias);
-      }
-      else {
+      } else {
         colInfo = new ColumnInfo(fields.get(i).getFieldName(),
             TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
                 .getFieldObjectInspector()),
@@ -1130,8 +1066,7 @@
 
       if (expr != null) {
         rwsch.putExpression(expr, colInfo);
-      }
-      else {
+      } else {
         rwsch.put(tabAlias, colAlias, colInfo);
       }
     }
@@ -1142,9 +1077,9 @@
    * If the cInfo is for an ASTNode, this function returns the ASTNode that it is for.
    */
   public static ASTNode getASTNode(ColumnInfo cInfo, RowResolver rr) throws SemanticException {
-    for(Map.Entry entry : rr.getExpressionMap().entrySet()) {
+    for (Map.Entry entry : rr.getExpressionMap().entrySet()) {
      ASTNode expr = entry.getValue();
-      if ( rr.getExpression(expr).equals(cInfo)) {
+      if (rr.getExpression(expr).equals(cInfo)) {
        return expr;
      }
    }
@@ -1160,26 +1095,27 @@
   /** Do the recursive work for visit */
   private static void _visit(Object t, Object parent, int childIndex, ContextVisitor visitor) {
-    if ( t==null ) {
+    if (t == null) {
       return;
     }
     visitor.visit(t, parent, childIndex, null);
     int n = ParseDriver.adaptor.getChildCount(t);
-    for (int i=0; i componentize(PTFInvocationSpec ptfInvocation) throws SemanticException {
+  public static ArrayList componentize(PTFInvocationSpec ptfInvocation)
+      throws SemanticException {
     ArrayList componentInvocations = new ArrayList();
 
     Stack ptfChain = new Stack();
     PTFInputSpec spec = ptfInvocation.getFunction();
     while (spec instanceof PartitionedTableFunctionSpec) {
-        ptfChain.push(spec);
-        spec = spec.getInput();
+      ptfChain.push(spec);
+      spec = spec.getInput();
     }
 
     PartitionedTableFunctionSpec prevFn = (PartitionedTableFunctionSpec) ptfChain.pop();
@@ -1187,12 +1123,12 @@
 
     PartitionSpec partSpec = prevFn.getPartition();
     OrderSpec orderSpec = prevFn.getOrder();
-    if ( partSpec == null ) {
-      //oops this should have been caught before trying to componentize
+    if (partSpec == null) {
+      // oops this should have been caught before trying to componentize
       throw new SemanticException(
           "No Partitioning specification specified at start of a PTFChain");
     }
-    if ( orderSpec == null ) {
+    if (orderSpec == null) {
       orderSpec = new OrderSpec(partSpec);
       prevFn.setOrder(orderSpec);
     }
@@ -1256,33 +1192,28 @@
    */
   Map> mapTopExprToLLFunExprs;
 
-  private void addLeadLagExpr(ExprNodeGenericFuncDesc llFunc)
-  {
+  private void addLeadLagExpr(ExprNodeGenericFuncDesc llFunc) {
     leadLagExprs = leadLagExprs == null ? new ArrayList() : leadLagExprs;
     leadLagExprs.add(llFunc);
   }
 
-  public List getLeadLagExprs()
-  {
+  public List getLeadLagExprs() {
     return leadLagExprs;
   }
 
-  public void addLLFuncExprForTopExpr(ExprNodeDesc topExpr, ExprNodeGenericFuncDesc llFuncExpr)
-  {
+  public void addLLFuncExprForTopExpr(ExprNodeDesc topExpr, ExprNodeGenericFuncDesc llFuncExpr) {
     addLeadLagExpr(llFuncExpr);
     mapTopExprToLLFunExprs = mapTopExprToLLFunExprs == null ?
        new HashMap>() : mapTopExprToLLFunExprs;
     List funcList = mapTopExprToLLFunExprs.get(topExpr);
-    if (funcList == null)
-    {
+    if (funcList == null) {
       funcList = new ArrayList();
       mapTopExprToLLFunExprs.put(topExpr, funcList);
     }
     funcList.add(llFuncExpr);
   }
 
-  public List getLLFuncExprsInTopExpr(ExprNodeDesc topExpr)
-  {
+  public List getLLFuncExprsInTopExpr(ExprNodeDesc topExpr) {
     if (mapTopExprToLLFunExprs == null) {
       return null;
     }
@@ -1291,8 +1222,7 @@
   }
 
   public static void validateNoLeadLagInValueBoundarySpec(ASTNode node)
-      throws SemanticException
-  {
+      throws SemanticException {
     String errMsg = "Lead/Lag not allowed in ValueBoundary Spec";
     TreeWizard tw = new TreeWizard(ParseDriver.adaptor, HiveParser.tokenNames);
     ValidateNoLeadLag visitor = new ValidateNoLeadLag(errMsg);
@@ -1300,37 +1230,30 @@
     visitor.checkValid();
   }
 
-  public static class ValidateNoLeadLag implements
-      ContextVisitor
-  {
+  public static class ValidateNoLeadLag implements ContextVisitor {
     String errMsg;
     boolean throwError = false;
     ASTNode errorNode;
 
-    public ValidateNoLeadLag(String errMsg)
-    {
+    public ValidateNoLeadLag(String errMsg) {
       this.errMsg = errMsg;
     }
 
     @SuppressWarnings("rawtypes")
     @Override
-    public void visit(Object t, Object parent, int childIndex, Map labels)
-    {
+    public void visit(Object t, Object parent, int childIndex, Map labels) {
       ASTNode expr = (ASTNode) t;
       ASTNode nameNode = (ASTNode) expr.getChild(0);
       if (nameNode.getText().equals(FunctionRegistry.LEAD_FUNC_NAME) || nameNode.getText()
-          .equals(FunctionRegistry.LAG_FUNC_NAME))
-      {
+          .equals(FunctionRegistry.LAG_FUNC_NAME)) {
         throwError = true;
         errorNode = expr;
       }
     }
 
-    void checkValid() throws SemanticException
-    {
-      if (throwError)
-      {
+    void checkValid() throws SemanticException {
+      if (throwError) {
        throw new SemanticException(errMsg + errorNode.toStringTree());
      }
    }