diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java index b1c5521ae7..6a16108e19 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.hooks; import java.io.Serializable; -import java.util.ArrayList; +import java.util.List; import org.junit.Assert; @@ -33,8 +33,7 @@ public void run(HookContext hookContext) { // Go through the root tasks, and verify the input format of the map reduce task(s) is // HiveSortedInputFormat - ArrayList> rootTasks = - hookContext.getQueryPlan().getRootTasks(); + List> rootTasks = hookContext.getQueryPlan().getRootTasks(); for (Task rootTask : rootTasks) { if (rootTask.getWork() instanceof MapredWork) { Assert.assertTrue("The root map reduce task's input was not marked as sorted.", diff --git ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java index 7e49b6c883..387fe61289 100644 --- ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java +++ ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java @@ -76,18 +76,18 @@ private String optimizedCBOPlan; private String optimizedQueryString; - private ArrayList> rootTasks; + private List> rootTasks; private FetchTask fetchTask; private final List reducerTimeStatsPerJobList; - private HashSet inputs; + private Set inputs; /** * Note: outputs are not all determined at compile time. * Some of the tasks can change the outputs at run time, because only at run * time, we know what are the changes. These tasks should keep a reference * to the outputs here. */ - private HashSet outputs; + private Set outputs; /** * Lineage information for the query. 
*/ @@ -96,7 +96,7 @@ private ColumnAccessInfo columnAccessInfo; private Schema resultSchema; - private HashMap idToTableNameMap; + private Map idToTableNameMap; private String queryId; private org.apache.hadoop.hive.ql.plan.api.Query query; @@ -696,11 +696,11 @@ public void setDone() { return done; } - public ArrayList> getRootTasks() { + public List> getRootTasks() { return rootTasks; } - public void setRootTasks(ArrayList> rootTasks) { + public void setRootTasks(List> rootTasks) { this.rootTasks = rootTasks; } @@ -716,7 +716,7 @@ public void setFetchTask(FetchTask fetchTask) { this.fetchTask = fetchTask; } - public HashSet getInputs() { + public Set getInputs() { return inputs; } @@ -724,7 +724,7 @@ public void setInputs(HashSet inputs) { this.inputs = inputs; } - public HashSet getOutputs() { + public Set getOutputs() { return outputs; } @@ -736,11 +736,11 @@ public Schema getResultSchema() { return resultSchema; } - public HashMap getIdToTableNameMap() { + public Map getIdToTableNameMap() { return idToTableNameMap; } - public void setIdToTableNameMap(HashMap idToTableNameMap) { + public void setIdToTableNameMap(Map idToTableNameMap) { this.idToTableNameMap = idToTableNameMap; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index 3265cc1fe1..7220f33c8c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -273,7 +273,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { } } // init aggregationParameterFields - ArrayList aggrs = conf.getAggregators(); + List aggrs = conf.getAggregators(); aggregationParameterFields = new ExprNodeEvaluator[aggrs.size()][]; aggregationParameterObjectInspectors = new ObjectInspector[aggrs.size()][]; aggregationParameterStandardObjectInspectors = new ObjectInspector[aggrs.size()][]; @@ -281,7 +281,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { aggregationIsDistinct = new boolean[aggrs.size()]; for (int i = 0; i < aggrs.size(); i++) { AggregationDesc aggr = aggrs.get(i); - ArrayList parameters = aggr.getParameters(); + List parameters = aggr.getParameters(); aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()]; aggregationParameterObjectInspectors[i] = new ObjectInspector[parameters .size()]; @@ -542,7 +542,7 @@ private void estimateRowSize() throws HiveException { // 64 bytes is the overhead for a reference fixedRowSize = javaHashEntryOverHead; - ArrayList keys = conf.getKeys(); + List keys = conf.getKeys(); // Go over all the keys and get the size of the fields of fixed length. 
Keep // track of the variable length keys @@ -1118,14 +1118,14 @@ public void closeOp(boolean abort) throws HiveException { public List genColLists( HashMap, OpParseContext> opParseCtx) { List colLists = new ArrayList(); - ArrayList keys = conf.getKeys(); + List keys = conf.getKeys(); for (ExprNodeDesc key : keys) { colLists = Utilities.mergeUniqElems(colLists, key.getCols()); } - ArrayList aggrs = conf.getAggregators(); + List aggrs = conf.getAggregators(); for (AggregationDesc aggr : aggrs) { - ArrayList params = aggr.getParameters(); + List params = aggr.getParameters(); for (ExprNodeDesc param : params) { colLists = Utilities.mergeUniqElems(colLists, param.getCols()); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java index 15a2cbc769..526929984a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java @@ -95,8 +95,8 @@ public LateralViewJoinOperator(CompilationOpContext ctx) { protected void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); - ArrayList ois = new ArrayList(); - ArrayList fieldNames = conf.getOutputInternalColNames(); + List ois = new ArrayList(); + List fieldNames = conf.getOutputInternalColNames(); // The output of the lateral view join will be the columns from the select // parent, followed by the column from the UDTF parent @@ -118,10 +118,10 @@ protected void initializeOp(Configuration hconf) throws HiveException { } // acc is short for accumulator. It's used to build the row before forwarding - ArrayList acc = new ArrayList(); + List acc = new ArrayList(); // selectObjs hold the row from the select op, until receiving a row from // the udtf op - ArrayList selectObjs = new ArrayList(); + List selectObjs = new ArrayList(); /** * An important assumption for processOp() is that for a given row from the diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java index 72dc631e7d..8e74f8a8b8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java @@ -31,24 +31,24 @@ public class RowSchema implements Serializable { private static final long serialVersionUID = 1L; - private ArrayList signature = new ArrayList(); + private List signature = new ArrayList(); public RowSchema() { } public RowSchema(RowSchema that) { - this.signature = (ArrayList) that.signature.clone(); + this.signature = new ArrayList<>(that.signature); } - public RowSchema(ArrayList signature) { + public RowSchema(List signature) { this.signature = signature; } - public void setSignature(ArrayList signature) { + public void setSignature(List signature) { this.signature = signature; } - public ArrayList getSignature() { + public List getSignature() { return signature; } diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java index 494459abd7..82b8c68cf3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.hooks; import java.io.Serializable; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -41,16 +40,12 @@ 
import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.ReduceWork; import org.apache.hadoop.hive.ql.plan.TezWork; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Checks whenever operator ids are not reused. */ public class NoOperatorReuseCheckerHook implements ExecuteWithHookContext { - private static final Logger LOG = LoggerFactory.getLogger(NoOperatorReuseCheckerHook.class); - static class UniqueOpIdChecker implements NodeProcessor { Map> opMap = new HashMap<>(); @@ -74,7 +69,7 @@ public void run(HookContext hookContext) throws Exception { List rootOps = Lists.newArrayList(); - ArrayList> roots = hookContext.getQueryPlan().getRootTasks(); + List> roots = hookContext.getQueryPlan().getRootTasks(); for (Task task : roots) { Object work = task.getWork(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java index 3a5b334716..d3fb91eddb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java @@ -191,21 +191,19 @@ protected boolean checkConvertBucketMapJoin( String baseBigAlias, List joinAliases) throws SemanticException { - LinkedHashMap> tblAliasToNumberOfBucketsInEachPartition = + Map> tblAliasToNumberOfBucketsInEachPartition = new LinkedHashMap>(); - LinkedHashMap>> tblAliasToBucketedFilePathsInEachPartition = + Map>> tblAliasToBucketedFilePathsInEachPartition = new LinkedHashMap>>(); - HashMap topOps = pGraphContext.getTopOps(); + Map topOps = pGraphContext.getTopOps(); - HashMap aliasToNewAliasMap = new HashMap(); + Map aliasToNewAliasMap = new HashMap(); // (partition to bucket file names) and (partition to bucket number) for // the big table; - LinkedHashMap> bigTblPartsToBucketFileNames = - new LinkedHashMap>(); - LinkedHashMap bigTblPartsToBucketNumber = - new LinkedHashMap(); + Map> bigTblPartsToBucketFileNames = new LinkedHashMap>(); + Map bigTblPartsToBucketNumber = new LinkedHashMap(); Integer[] joinKeyOrder = null; // accessing order of join cols to bucket cols, should be same boolean bigTablePartitioned = true; diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java index 697d3b2374..d3749fdcf1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java @@ -139,14 +139,14 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, List colLists = new ArrayList<>(); GroupByDesc conf = gbOp.getConf(); - ArrayList keys = conf.getKeys(); + List keys = conf.getKeys(); for (ExprNodeDesc key : keys) { colLists = mergeFieldNodesWithDesc(colLists, key); } - ArrayList aggrs = conf.getAggregators(); + List aggrs = conf.getAggregators(); for (AggregationDesc aggr : aggrs) { - ArrayList params = aggr.getParameters(); + List params = aggr.getParameters(); for (ExprNodeDesc param : params) { colLists = mergeFieldNodesWithDesc(colLists, param); } @@ -812,10 +812,10 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, } } if (cols.size() < originalOutputColumnNames.size()) { - ArrayList newColList = new ArrayList(); - ArrayList newOutputColumnNames = new ArrayList(); - ArrayList rs_oldsignature = op.getSchema().getSignature(); - ArrayList rs_newsignature = new ArrayList(); + List 
newColList = new ArrayList(); + List newOutputColumnNames = new ArrayList(); + List rs_oldsignature = op.getSchema().getSignature(); + List rs_newsignature = new ArrayList(); // The pruning needs to preserve the order of columns in the input schema Set colNames = new HashSet(); for (FieldNode col : cols) { @@ -899,8 +899,8 @@ private static void pruneReduceSinkOperator(boolean[] retainFlags, Map oldMap = reduce.getColumnExprMap(); LOG.info("RS " + reduce.getIdentifier() + " oldColExprMap: " + oldMap); RowSchema oldRS = reduce.getSchema(); - ArrayList old_signature = oldRS.getSignature(); - ArrayList signature = new ArrayList(old_signature); + List old_signature = oldRS.getSignature(); + List signature = new ArrayList(old_signature); List valueColNames = reduceConf.getOutputValueColumnNames(); ArrayList newValueColNames = new ArrayList(); @@ -1071,8 +1071,8 @@ private static void pruneOperator(NodeProcessorCtx ctx, throws SemanticException { RowSchema inputSchema = op.getSchema(); if (inputSchema != null) { - ArrayList rs = new ArrayList<>(); - ArrayList inputCols = inputSchema.getSignature(); + List rs = new ArrayList<>(); + List inputCols = inputSchema.getSignature(); for (ColumnInfo i: inputCols) { FieldNode fn = lookupColumn(cols, i.getInternalName()); if (fn != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java index a0482537ae..b111e4f428 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java @@ -36,10 +36,8 @@ import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; import org.apache.hadoop.hive.ql.exec.Operator; -import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.UnionOperator; -import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -136,7 +134,7 @@ public ConstantPropagateProcCtx(ConstantPropagateOption option) { return constants; } - ArrayList signature = op.getSchema().getSignature(); + List signature = op.getSchema().getSignature(); if (op instanceof LimitOperator || op instanceof FilterOperator) { // there should be only one parent. if (op.getParentOperators().size() == 1) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java index acb9788baa..c1981e6981 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java @@ -1120,7 +1120,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object.. 
} GroupByDesc conf = op.getConf(); - ArrayList keys = conf.getKeys(); + List keys = conf.getKeys(); for (int i = 0; i < keys.size(); i++) { ExprNodeDesc key = keys.get(i); ExprNodeDesc newkey = foldExpr(key, colToConstants, cppCtx, op, 0, false); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/CountDistinctRewriteProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/CountDistinctRewriteProc.java index 542d356769..a50ad78e8f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/CountDistinctRewriteProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/CountDistinctRewriteProc.java @@ -142,7 +142,7 @@ protected int checkCountDistinct(GroupByOperator mGby, ReduceSinkOperator rs, GroupByOperator rGby) { // Position of distinct column in aggregator list of map Gby before rewrite. int indexOfDist = -1; - ArrayList keys = mGby.getConf().getKeys(); + List keys = mGby.getConf().getKeys(); if (!(mGby.getConf().getMode() == GroupByDesc.Mode.HASH && !mGby.getConf().isGroupingSetsPresent() && rs.getConf().getKeyCols().size() == 1 && rs.getConf().getPartitionCols().size() == 0 diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index 5d6143d6a4..408ab08274 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -1305,7 +1305,7 @@ public static void createMRWorkForMergingFiles(FileSinkOperator fsInput, DynamicPartitionCtx dpCtx = fsInputDesc.getDynPartCtx(); if (dpCtx != null && dpCtx.getNumDPCols() > 0) { // adding DP ColumnInfo to the RowSchema signature - ArrayList signature = inputRS.getSignature(); + List signature = inputRS.getSignature(); String tblAlias = fsInputDesc.getTableInfo().getTableName(); for (String dpCol : dpCtx.getDPColNames()) { ColumnInfo colInfo = new ColumnInfo(dpCol, diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java index 75bce638a6..bb9681499a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java @@ -35,7 +35,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; -import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; @@ -63,17 +62,17 @@ * */ public static class SamplePrunerCtx implements NodeProcessorCtx { - HashMap opToSamplePruner; + Map opToSamplePruner; public SamplePrunerCtx( - HashMap opToSamplePruner) { + Map opToSamplePruner) { this.opToSamplePruner = opToSamplePruner; } /** * @return the opToSamplePruner */ - public HashMap getOpToSamplePruner() { + public Map getOpToSamplePruner() { return opToSamplePruner; } @@ -102,8 +101,7 @@ public void setOpToSamplePruner( public ParseContext transform(ParseContext pctx) throws SemanticException { // create a the context for walking operators - SamplePrunerCtx samplePrunerCtx = new SamplePrunerCtx(pctx - .getOpToSamplePruner()); + SamplePrunerCtx samplePrunerCtx = new SamplePrunerCtx(pctx.getOpToSamplePruner()); Map opRules = new LinkedHashMap(); opRules.put(new RuleRegExp("R1", diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java 
ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java index d1585c2901..165cb9efcf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java @@ -498,7 +498,7 @@ OpAttr visit(HiveSortLimit sortRel) throws SemanticException { List keepColumns = new ArrayList(); final ImmutableBitSet sortColsPos = sortColsPosBuilder.build(); final ImmutableBitSet sortOutputColsPos = sortOutputColsPosBuilder.build(); - final ArrayList inputSchema = inputOp.getSchema().getSignature(); + final List inputSchema = inputOp.getSchema().getSignature(); for (int pos=0; pos stack, NodeProcessorCtx procCtx, LateralViewJoinOperator op = (LateralViewJoinOperator)nd; boolean isUdtfPath = true; Operator inpOp = getParent(stack); - ArrayList cols = inpOp.getSchema().getSignature(); + List cols = inpOp.getSchema().getSignature(); lCtx.getIndex().copyPredicates(inpOp, op); if (inpOp instanceof SelectOperator) { @@ -331,7 +330,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // For the select path the columns are the ones at the beginning of the // current operators schema and for the udtf path the columns are // at the end of the operator schema. - ArrayList out_cols = op.getSchema().getSignature(); + List out_cols = op.getSchema().getSignature(); int out_cols_size = out_cols.size(); int cols_size = cols.size(); int outColOffset = isUdtfPath ? out_cols_size - cols_size : 0; @@ -376,7 +375,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, lctx.getIndex().copyPredicates(inpOp, sop); RowSchema rs = sop.getSchema(); - ArrayList col_infos = rs.getSignature(); + List col_infos = rs.getSignature(); int cnt = 0; for(ExprNodeDesc expr : sop.getConf().getColList()) { Dependency dep = ExprProcFactory.getExprDependency(lctx, inpOp, expr, outputMap); @@ -417,7 +416,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, LineageCtx lctx = (LineageCtx)procCtx; GroupByOperator gop = (GroupByOperator)nd; - ArrayList col_infos = gop.getSchema().getSignature(); + List col_infos = gop.getSchema().getSignature(); Operator inpOp = getParent(stack); lctx.getIndex().copyPredicates(inpOp, gop); int cnt = 0; @@ -551,7 +550,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Operator inpOp = getParent(stack); lCtx.getIndex().copyPredicates(inpOp, op); RowSchema rs = op.getSchema(); - ArrayList inp_cols = inpOp.getSchema().getSignature(); + List inp_cols = inpOp.getSchema().getSignature(); // check only for input cols for(ColumnInfo input : inp_cols) { @@ -596,7 +595,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } if (op instanceof GroupByOperator) { - ArrayList col_infos = rop.getSchema().getSignature(); + List col_infos = rop.getSchema().getSignature(); for(ExprNodeDesc expr : rop.getConf().getKeyCols()) { lCtx.getIndex().putDependency(rop, col_infos.get(cnt++), ExprProcFactory.getExprDependency(lCtx, inpOp, expr, outputMap)); @@ -666,7 +665,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, lCtx.getIndex().addPredicate(fop, cond); } - ArrayList inp_cols = inpOp.getSchema().getSignature(); + List inp_cols = inpOp.getSchema().getSignature(); int cnt = 0; for(ColumnInfo ci : rs.getSignature()) { lCtx.getIndex().putDependency(fop, ci, @@ -700,7 +699,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Operator inpOp = 
getParent(stack); lCtx.getIndex().copyPredicates(inpOp, op); RowSchema rs = op.getSchema(); - ArrayList inp_cols = inpOp.getSchema().getSignature(); + List inp_cols = inpOp.getSchema().getSignature(); int cnt = 0; for(ColumnInfo ci : rs.getSignature()) { lCtx.getIndex().putDependency(op, ci, diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 087ba47cab..a846d9ba21 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -3177,7 +3177,7 @@ private boolean validateAggregationDesc(AggregationDesc aggDesc, GroupByDesc.Mod } - ArrayList parameters = aggDesc.getParameters(); + List parameters = aggDesc.getParameters(); if (parameters != null && !validateExprNodeDesc(parameters, "Aggregation Function UDF " + udfName + " parameter")) { return false; @@ -4603,7 +4603,7 @@ private boolean usesVectorUDFAdaptor(VectorExpression[] vecExprs) { // For now, we don't support group by on DECIMAL_64 keys. VectorExpression[] vecKeyExpressions = vContext.getVectorExpressionsUpConvertDecimal64(keysDesc); - ArrayList aggrDesc = groupByDesc.getAggregators(); + List aggrDesc = groupByDesc.getAggregators(); final int size = aggrDesc.size(); VectorAggregationDesc[] vecAggrDescs = new VectorAggregationDesc[size]; @@ -4820,7 +4820,7 @@ private static void createVectorPTFDesc(Operator ptfOp, List windowsFunctions = windowTableFunctionDef.getWindowFunctions(); final int functionCount = windowsFunctions.size(); - ArrayList outputSignature = ptfOp.getSchema().getSignature(); + List outputSignature = ptfOp.getSchema().getSignature(); final int outputSize = outputSignature.size(); /* diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java index 7f0c120d1b..d8c4ccb61e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.optimizer.ppr; -import java.util.HashMap; +import java.util.Map; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -34,16 +34,16 @@ * Map from tablescan operator to partition pruning predicate that is * initialized from the ParseContext. */ - private final HashMap opToPartPruner; + private final Map opToPartPruner; /** * Constructor. */ - public OpWalkerCtx(HashMap opToPartPruner) { + public OpWalkerCtx(Map opToPartPruner) { this.opToPartPruner = opToPartPruner; } - public HashMap getOpToPartPruner() { + public Map getOpToPartPruner() { return opToPartPruner; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 370697380e..95bed20787 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -127,7 +127,7 @@ protected CompilationOpContext cContext; protected Context ctx; - protected HashMap idToTableNameMap; + protected Map idToTableNameMap; protected QueryProperties queryProperties; /** @@ -147,11 +147,11 @@ /** * ReadEntities that are passed to the hooks. */ - protected HashSet inputs; + protected Set inputs; /** * List of WriteEntities that are passed to the hooks. 
*/ - protected HashSet outputs; + protected Set outputs; /** * Lineage information for the query. */ @@ -266,7 +266,7 @@ protected static Hive createHiveDB(HiveConf conf) throws SemanticException { } } - public HashMap getIdToTableNameMap() { + public Map getIdToTableNameMap() { return idToTableNameMap; } @@ -656,11 +656,11 @@ private static String spliceString(String str, int i, int length, String replace return str.substring(0, i) + replacement + str.substring(i + length); } - public HashSet getInputs() { + public Set getInputs() { return inputs; } - public HashSet getOutputs() { + public Set getOutputs() { return outputs; } @@ -1943,7 +1943,7 @@ protected void analyzeDDLSkewedValues(List> skewedValues, ASTNode c } break; case HiveParser.TOK_TABCOLVALUE_PAIR: - ArrayList vLNodes = vAstNode.getChildren(); + List vLNodes = vAstNode.getChildren(); for (Node node : vLNodes) { if ( ((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) { throw new SemanticException( @@ -2239,11 +2239,11 @@ protected String toMessage(ErrorMsg message, Object detail) { return rootTasks; } - public HashSet getAllInputs() { + public Set getAllInputs() { return inputs; } - public HashSet getAllOutputs() { + public Set getAllOutputs() { return outputs; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 9dd6954f35..c3184a8931 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -1604,8 +1604,8 @@ Operator getOptimizedHiveOPDag() throws SemanticException { // SemanticAnalyzer Operator handleInsertStatement(String dest, Operator input, RowResolver inputRR, QB qb) throws SemanticException { - ArrayList colList = new ArrayList(); - ArrayList columns = inputRR.getColumnInfos(); + List colList = new ArrayList(); + List columns = inputRR.getColumnInfos(); for (int i = 0; i < columns.size(); i++) { ColumnInfo col = columns.get(i); colList.add(new ExprNodeColumnDesc(col)); @@ -1614,7 +1614,7 @@ Operator getOptimizedHiveOPDag() throws SemanticException { RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, qb, selExprList); - ArrayList columnNames = new ArrayList(); + List columnNames = new ArrayList(); Map colExprMap = new HashMap(); for (int i = 0; i < colList.size(); i++) { String outputCol = getColumnInternalName(i); @@ -2493,8 +2493,8 @@ private RelNode genSetOpLogicalPlan(Opcode opcode, String alias, String leftalia // SetOp Rel RowResolver leftRR = this.relToHiveRR.get(leftRel); RowResolver rightRR = this.relToHiveRR.get(rightRel); - HashMap leftmap = leftRR.getFieldMap(leftalias); - HashMap rightmap = rightRR.getFieldMap(rightalias); + Map leftmap = leftRR.getFieldMap(leftalias); + Map rightmap = rightRR.getFieldMap(rightalias); // 2. 
Validate that SetOp is feasible according to Hive (by using type // info from RR) @@ -3677,7 +3677,7 @@ private AggInfo getHiveAggInfo(ASTNode aggAst, int aggFnLstArgIndx, RowResolver GenericUDAFEvaluator genericUDAFEvaluator = null; if (aggName.toLowerCase().equals(FunctionRegistry.LEAD_FUNC_NAME) || aggName.toLowerCase().equals(FunctionRegistry.LAG_FUNC_NAME)) { - ArrayList originalParameterTypeInfos = SemanticAnalyzer + List originalParameterTypeInfos = SemanticAnalyzer .getWritableObjectInspector(aggParameters); genericUDAFEvaluator = FunctionRegistry.getGenericWindowingEvaluator(aggName, originalParameterTypeInfos, isDistinct, isAllColumns); @@ -3762,7 +3762,7 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException } List grpByAstExprs = getGroupByForClause(qbp, detsClauseName); - HashMap aggregationTrees = qbp.getAggregationExprsForClause(detsClauseName); + Map aggregationTrees = qbp.getAggregationExprsForClause(detsClauseName); boolean hasGrpByAstExprs = (grpByAstExprs != null && !grpByAstExprs.isEmpty()) ? true : false; boolean hasAggregationTrees = (aggregationTrees != null && !aggregationTrees.isEmpty()) ? true : false; @@ -4818,11 +4818,11 @@ private RelNode genUDTFPlan(GenericUDTF genericUDTF, String genericUDTFName, Str // ObjectInspector that can be used to initialize the UDTF. Then, the // resulting output object inspector can be used to make the RowResolver // for the UDTF operator - ArrayList inputCols = selectRR.getColumnInfos(); + List inputCols = selectRR.getColumnInfos(); // Create the object inspector for the input columns and initialize the // UDTF - ArrayList colNames = new ArrayList(); + List colNames = new ArrayList(); ObjectInspector[] colOIs = new ObjectInspector[inputCols.size()]; for (int i = 0; i < inputCols.size(); i++) { colNames.add(inputCols.get(i).getInternalName()); @@ -4848,7 +4848,7 @@ private RelNode genUDTFPlan(GenericUDTF genericUDTF, String genericUDTFName, Str } // Generate the output column info's / row resolver using internal names. 
- ArrayList udtfCols = new ArrayList(); + List udtfCols = new ArrayList(); Iterator colAliasesIter = colAliases.iterator(); for (StructField sf : outputOI.getAllStructFieldRefs()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java index acd2b6d1a3..81f35cec84 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java @@ -234,12 +234,12 @@ private Operator genSelOp(String command, boolean rewritten, Context origCtx) private void replaceSelectOperatorProcess(SelectOperator operator, Operator input) throws HiveException { RowSchema selRS = operator.getSchema(); - ArrayList signature = new ArrayList<>(); + List signature = new ArrayList<>(); OpParseContext inputCtx = sa.opParseCtx.get(input); RowResolver inputRR = inputCtx.getRowResolver(); - ArrayList columns = inputRR.getColumnInfos(); - ArrayList colList = new ArrayList(); - ArrayList columnNames = new ArrayList(); + List columns = inputRR.getColumnInfos(); + List colList = new ArrayList(); + List columnNames = new ArrayList(); Map columnExprMap = new HashMap(); // the column positions in the operator should be like this diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index 76c69cf24b..6e05ced72d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -57,11 +57,11 @@ import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.StringTokenizer; /** @@ -91,8 +91,8 @@ public static class SemanticAnalyzerWrapperContext { private HiveConf conf; private Hive db; - private HashSet inputs; - private HashSet outputs; + private Set inputs; + private Set outputs; private List> tasks; private Logger LOG; private Context ctx; @@ -107,11 +107,11 @@ public Hive getHive() { return db; } - public HashSet getInputs() { + public Set getInputs() { return inputs; } - public HashSet getOutputs() { + public Set getOutputs() { return outputs; } @@ -136,8 +136,8 @@ public DumpType getEventType() { } public SemanticAnalyzerWrapperContext(HiveConf conf, Hive db, - HashSet inputs, - HashSet outputs, + Set inputs, + Set outputs, List> tasks, Logger LOG, Context ctx){ this.conf = conf; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java index ead45ca005..d1fdfc098f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java @@ -24,11 +24,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - +import java.util.Set; import org.antlr.runtime.tree.Tree; import org.apache.commons.codec.DecoderException; @@ -42,7 +41,6 @@ import org.apache.hadoop.hive.conf.HiveConf.StrictChecks; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import 
org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; @@ -390,8 +388,7 @@ private void analyzeLoad(ASTNode ast) throws SemanticException { } Task childTask = TaskFactory.get( - new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true, - isLocal) + new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true, isLocal) ); rootTasks.add(childTask); @@ -558,7 +555,7 @@ private void reparseAndSuperAnalyze(Table table, URI fromURI) throws SemanticExc } @Override - public HashSet getAllOutputs() { + public Set getAllOutputs() { return outputs; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java index d1854844ea..60e04c6057 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java @@ -841,9 +841,9 @@ protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg, */ public static StructObjectInspector getStandardStructOI(RowResolver rr) { StructObjectInspector oi; - ArrayList colLists = rr.getColumnInfos(); - ArrayList structFieldNames = new ArrayList(); - ArrayList structFieldObjectInspectors = new ArrayList(); + List colLists = rr.getColumnInfos(); + List structFieldNames = new ArrayList(); + List structFieldObjectInspectors = new ArrayList(); for (ColumnInfo columnInfo : colLists) { String colName = columnInfo.getInternalName(); ObjectInspector colOI = columnInfo.getObjectInspector(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java index 67b4901b0c..42dd59404b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java @@ -76,23 +76,23 @@ public class ParseContext { - private HashMap opToPartPruner; - private HashMap opToPartList; - private HashMap opToSamplePruner; + private Map opToPartPruner; + private Map opToPartList; + private Map opToSamplePruner; private Map> opToPartToSkewedPruner; - private HashMap topOps; + private Map topOps; private Set joinOps; private Set mapJoinOps; private Set smbMapJoinOps; private List reduceSinkOperatorsAddedByEnforceBucketingSorting; - private HashMap nameToSplitSample; + private Map nameToSplitSample; private List loadTableWork; private List loadFileWork; private List columnStatsAutoGatherContexts; private Context ctx; private QueryState queryState; private HiveConf conf; - private HashMap idToTableNameMap; + private Map idToTableNameMap; private int destTableId; private UnionProcContext uCtx; private List> listMapJoinOpsNoReducer; // list of map join @@ -109,7 +109,7 @@ private GlobalLimitCtx globalLimitCtx; - private HashSet semanticInputs; + private Set semanticInputs; private List> rootTasks; private FetchTask fetchTask; @@ -132,10 +132,8 @@ private Map rsToRuntimeValuesInfo = new LinkedHashMap(); - private Map rsToSemiJoinBranchInfo = - new HashMap<>(); - private Map colExprToGBMap = - new HashMap<>(); + private Map rsToSemiJoinBranchInfo = new HashMap<>(); + private Map colExprToGBMap = new HashMap<>(); private Map> semiJoinHints; private boolean disableMapJoin; @@ -180,21 +178,21 @@ public ParseContext() { */ public ParseContext( QueryState queryState, - HashMap opToPartPruner, - HashMap opToPartList, - HashMap topOps, + Map opToPartPruner, + Map opToPartList, + Map topOps, Set joinOps, Set smbMapJoinOps, List loadTableWork, 
List loadFileWork, List columnStatsAutoGatherContexts, - Context ctx, HashMap idToTableNameMap, int destTableId, + Context ctx, Map idToTableNameMap, int destTableId, UnionProcContext uCtx, List> listMapJoinOpsNoReducer, Map prunedPartitions, Map tabNameToTabObject, - HashMap opToSamplePruner, + Map opToSamplePruner, GlobalLimitCtx globalLimitCtx, - HashMap nameToSplitSample, - HashSet semanticInputs, List> rootTasks, + Map nameToSplitSample, + Set semanticInputs, List> rootTasks, Map> opToPartToSkewedPruner, Map viewAliasToInput, List reduceSinkOperatorsAddedByEnforceBucketingSorting, @@ -292,7 +290,7 @@ public QueryState getQueryState() { /** * @return the opToPartPruner */ - public HashMap getOpToPartPruner() { + public Map getOpToPartPruner() { return opToPartPruner; } @@ -300,12 +298,11 @@ public QueryState getQueryState() { * @param opToPartPruner * the opToPartPruner to set */ - public void setOpToPartPruner( - HashMap opToPartPruner) { + public void setOpToPartPruner(Map opToPartPruner) { this.opToPartPruner = opToPartPruner; } - public HashMap getOpToPartList() { + public Map getOpToPartList() { return opToPartList; } @@ -322,7 +319,7 @@ public void setReduceSinkOperatorsAddedByEnforceBucketingSorting( /** * @return the topOps */ - public HashMap getTopOps() { + public Map getTopOps() { return topOps; } @@ -330,15 +327,15 @@ public void setReduceSinkOperatorsAddedByEnforceBucketingSorting( * @param topOps * the topOps to set */ - public void setTopOps(HashMap topOps) { + public void setTopOps(Map topOps) { this.topOps = topOps; } - public HashMap getNameToSplitSample() { + public Map getNameToSplitSample() { return nameToSplitSample; } - public void setNameToSplitSample(HashMap nameToSplitSample) { + public void setNameToSplitSample(Map nameToSplitSample) { this.nameToSplitSample = nameToSplitSample; } @@ -364,11 +361,11 @@ public void setLoadFileWork(List loadFileWork) { this.loadFileWork = loadFileWork; } - public HashMap getIdToTableNameMap() { + public Map getIdToTableNameMap() { return idToTableNameMap; } - public void setIdToTableNameMap(HashMap idToTableNameMap) { + public void setIdToTableNameMap(Map idToTableNameMap) { this.idToTableNameMap = idToTableNameMap; } @@ -422,7 +419,7 @@ public void setListMapJoinOpsNoReducer( /** * @return the opToSamplePruner */ - public HashMap getOpToSamplePruner() { + public Map getOpToSamplePruner() { return opToSamplePruner; } @@ -431,7 +428,7 @@ public void setListMapJoinOpsNoReducer( * the opToSamplePruner to set */ public void setOpToSamplePruner( - HashMap opToSamplePruner) { + Map opToSamplePruner) { this.opToSamplePruner = opToSamplePruner; } @@ -507,7 +504,7 @@ public void setGlobalLimitCtx(GlobalLimitCtx globalLimitCtx) { this.globalLimitCtx = globalLimitCtx; } - public HashSet getSemanticInputs() { + public Set getSemanticInputs() { return semanticInputs; } @@ -551,8 +548,7 @@ public PrunedPartitionList getPrunedPartitions(String alias, TableScanOperator t * @param opToPartToSkewedPruner * the opToSkewedPruner to set */ - public void setOpPartToSkewedPruner( - HashMap> opToPartToSkewedPruner) { + public void setOpPartToSkewedPruner(Map> opToPartToSkewedPruner) { this.opToPartToSkewedPruner = opToPartToSkewedPruner; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java index e0bef6b889..7c4a5b758f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java @@ -53,22 +53,22 @@ // 
keeps track of the right-hand-side table name of the left-semi-join, and // its list of join keys - private transient final HashMap> rhsSemijoin; + private transient final Map> rhsSemijoin; // join conditions - private transient ArrayList> expressions; + private transient List> expressions; // key index to nullsafe join flag - private ArrayList nullsafes; + private List nullsafes; // filters - private transient ArrayList> filters; + private transient List> filters; // outerjoin-pos = other-pos:filter-len, other-pos:filter-len, ... private int[][] filterMap; // filters for pushing - private transient ArrayList> filtersForPushing; + private transient List> filtersForPushing; // user asked for map-side join private boolean mapSideJoin; @@ -93,7 +93,7 @@ public QBJoinTree() { nextTag = 0; noOuterJoin = true; noSemiJoin = true; - rhsSemijoin = new HashMap>(); + rhsSemijoin = new HashMap>(); aliasToOpInfo = new HashMap>(); postJoinFilters = new ArrayList(); } @@ -137,11 +137,11 @@ public void setLeftAliases(String[] leftAliases) { this.leftAliases = leftAliases; } - public ArrayList> getExpressions() { + public List> getExpressions() { return expressions; } - public void setExpressions(ArrayList> expressions) { + public void setExpressions(List> expressions) { this.expressions = expressions; } @@ -192,7 +192,7 @@ public void setNoSemiJoin(boolean semi) { /** * @return the filters */ - public ArrayList> getFilters() { + public List> getFilters() { return filters; } @@ -200,14 +200,14 @@ public void setNoSemiJoin(boolean semi) { * @param filters * the filters to set */ - public void setFilters(ArrayList> filters) { + public void setFilters(List> filters) { this.filters = filters; } /** * @return the filters for pushing */ - public ArrayList> getFiltersForPushing() { + public List> getFiltersForPushing() { return filtersForPushing; } @@ -215,7 +215,7 @@ public void setFilters(ArrayList> filters) { * @param filters for pushing * the filters to set */ - public void setFiltersForPushing(ArrayList> filters) { + public void setFiltersForPushing(List> filters) { this.filtersForPushing = filters; } @@ -275,8 +275,8 @@ public void addRHSSemijoin(String alias) { * @param alias * @param columns */ - public void addRHSSemijoinColumns(String alias, ArrayList columns) { - ArrayList cols = rhsSemijoin.get(alias); + public void addRHSSemijoinColumns(String alias, List columns) { + List cols = rhsSemijoin.get(alias); if (cols == null) { rhsSemijoin.put(alias, columns); } else { @@ -291,7 +291,7 @@ public void addRHSSemijoinColumns(String alias, ArrayList columns) { * @param column */ public void addRHSSemijoinColumns(String alias, ASTNode column) { - ArrayList cols = rhsSemijoin.get(alias); + List cols = rhsSemijoin.get(alias); if (cols == null) { cols = new ArrayList(); cols.add(column); @@ -301,7 +301,7 @@ public void addRHSSemijoinColumns(String alias, ASTNode column) { } } - public ArrayList getRHSSemijoinColumns(String alias) { + public List getRHSSemijoinColumns(String alias) { return rhsSemijoin.get(alias); } @@ -312,9 +312,9 @@ public void addRHSSemijoinColumns(String alias, ASTNode column) { * the source join tree */ public void mergeRHSSemijoin(QBJoinTree src) { - for (Entry> e : src.rhsSemijoin.entrySet()) { + for (Entry> e : src.rhsSemijoin.entrySet()) { String key = e.getKey(); - ArrayList value = rhsSemijoin.get(key); + List value = rhsSemijoin.get(key); if (value == null) { rhsSemijoin.put(key, e.getValue()); } else { @@ -323,11 +323,11 @@ public void mergeRHSSemijoin(QBJoinTree src) { } } - public 
ArrayList getNullSafes() { + public List getNullSafes() { return nullsafes; } - public void setNullSafes(ArrayList nullSafes) { + public void setNullSafes(List nullSafes) { this.nullsafes = nullSafes; } @@ -426,7 +426,7 @@ public QBJoinTree clone() throws CloneNotSupportedException { cloned.addPostJoinFilter(filter); } // clone rhsSemijoin - for (Entry> entry : rhsSemijoin.entrySet()) { + for (Entry> entry : rhsSemijoin.entrySet()) { cloned.addRHSSemijoinColumns(entry.getKey(), entry.getValue()); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java index ed0da84e26..e5fc3d7e05 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java @@ -46,21 +46,21 @@ private ASTNode joinExpr; private ASTNode hints; private List hintList; - private final HashMap aliasToSrc; + private final Map aliasToSrc; /** * insclause-0 -> TOK_TAB ASTNode */ - private final HashMap nameToDest; + private final Map nameToDest; /** * For 'insert into FOO(x,y) select ...' this stores the * insclause-0 -> x,y mapping */ private final Map> nameToDestSchema; - private final HashMap nameToSample; + private final Map nameToSample; private final Map exprToColumnAlias; private final Map destToSelExpr; - private final HashMap destToWhereExpr; - private final HashMap destToGroupby; + private final Map destToWhereExpr; + private final Map destToGroupby; private final Set destRollups; private final Set destCubes; private final Set destGroupingSets; @@ -74,7 +74,7 @@ private boolean isAnalyzeCommand; // used for the analyze command (statistics) private boolean isNoScanAnalyzeCommand; // used for the analyze command (statistics) (noscan) - private final HashMap tableSpecs; // used for statistics + private final Map tableSpecs; // used for statistics private AnalyzeRewriteContext analyzeRewrite; @@ -82,40 +82,40 @@ /** * ClusterBy is a short name for both DistributeBy and SortBy. */ - private final HashMap destToClusterby; + private final Map destToClusterby; /** * DistributeBy controls the hashcode of the row, which determines which * reducer the rows will go to. */ - private final HashMap destToDistributeby; + private final Map destToDistributeby; /** * SortBy controls the reduce keys, which affects the order of rows that the * reducer receives. */ - private final HashMap destToSortby; + private final Map destToSortby; /** * Maping from table/subquery aliases to all the associated lateral view nodes. 
*/ - private final HashMap> aliasToLateralViews; + private final Map> aliasToLateralViews; - private final HashMap destToLateralView; + private final Map destToLateralView; /* Order by clause */ - private final HashMap destToOrderby; + private final Map destToOrderby; // Use SimpleEntry to save the offset and rowcount of limit clause // KEY of SimpleEntry: offset // VALUE of SimpleEntry: rowcount - private final HashMap> destToLimit; + private final Map> destToLimit; private int outerQueryLimit; // used by GroupBy - private final LinkedHashMap> destToAggregationExprs; - private final HashMap> destToDistinctFuncExprs; + private final Map> destToAggregationExprs; + private final Map> destToDistinctFuncExprs; // used by Windowing - private final LinkedHashMap> destToWindowingExprs; + private final Map> destToWindowingExprs; @SuppressWarnings("unused") @@ -144,15 +144,15 @@ public QBParseInfo(String alias, boolean isSubQ) { destCubes = new HashSet(); destGroupingSets = new HashSet(); - destToAggregationExprs = new LinkedHashMap>(); - destToWindowingExprs = new LinkedHashMap>(); + destToAggregationExprs = new LinkedHashMap>(); + destToWindowingExprs = new LinkedHashMap>(); destToDistinctFuncExprs = new HashMap>(); this.alias = StringInternUtils.internIfNotNull(alias); this.isSubQ = isSubQ; outerQueryLimit = -1; - aliasToLateralViews = new HashMap>(); + aliasToLateralViews = new HashMap>(); tableSpecs = new HashMap(); @@ -166,13 +166,11 @@ public void clearAggregationExprsForClause(String clause) { destToAggregationExprs.get(clause).clear(); } - public void setAggregationExprsForClause(String clause, - LinkedHashMap aggregationTrees) { + public void setAggregationExprsForClause(String clause, Map aggregationTrees) { destToAggregationExprs.put(clause, aggregationTrees); } - public void addAggregationExprsForClause(String clause, - LinkedHashMap aggregationTrees) { + public void addAggregationExprsForClause(String clause, Map aggregationTrees) { if (destToAggregationExprs.containsKey(clause)) { destToAggregationExprs.get(clause).putAll(aggregationTrees); } else { @@ -214,12 +212,12 @@ public boolean isInsertIntoTable(String fullTableName) { return insertIntoTables.containsKey(fullTableName.toLowerCase()); } - public HashMap getAggregationExprsForClause(String clause) { + public Map getAggregationExprsForClause(String clause) { return destToAggregationExprs.get(clause); } public void addWindowingExprToClause(String clause, ASTNode windowingExprNode) { - LinkedHashMap windowingExprs = destToWindowingExprs.get(clause); + Map windowingExprs = destToWindowingExprs.get(clause); if ( windowingExprs == null ) { windowingExprs = new LinkedHashMap(); destToWindowingExprs.put(clause, windowingExprs); @@ -227,7 +225,7 @@ public void addWindowingExprToClause(String clause, ASTNode windowingExprNode) { windowingExprs.put(windowingExprNode.toStringTree(), windowingExprNode); } - public HashMap getWindowingExprsForClause(String clause) { + public Map getWindowingExprsForClause(String clause) { return destToWindowingExprs.get(clause); } @@ -337,7 +335,7 @@ public ASTNode getWhrForClause(String clause) { return destToWhereExpr.get(clause); } - public HashMap getDestToWhereExpr() { + public Map getDestToWhereExpr() { return destToWhereExpr; } @@ -357,7 +355,7 @@ public ASTNode getGroupByForClause(String clause) { return destGroupingSets; } - public HashMap getDestToGroupBy() { + public Map getDestToGroupBy() { return destToGroupby; } @@ -388,7 +386,7 @@ public ASTNode getClusterByForClause(String clause) { return 
destToClusterby.get(clause); } - public HashMap getDestToClusterBy() { + public Map getDestToClusterBy() { return destToClusterby; } @@ -403,7 +401,7 @@ public ASTNode getDistributeByForClause(String clause) { return destToDistributeby.get(clause); } - public HashMap getDestToDistributeBy() { + public Map getDestToDistributeBy() { return destToDistributeby; } @@ -422,11 +420,11 @@ public ASTNode getOrderByForClause(String clause) { return destToOrderby.get(clause); } - public HashMap getDestToSortBy() { + public Map getDestToSortBy() { return destToSortby; } - public HashMap getDestToOrderBy() { + public Map getDestToOrderBy() { return destToOrderby; } @@ -578,7 +576,7 @@ public ASTNode getHints() { return hints; } - public Map> getAliasToLateralViews() { + public Map> getAliasToLateralViews() { return aliasToLateralViews; } @@ -587,7 +585,7 @@ public ASTNode getHints() { } public void addLateralViewForAlias(String alias, ASTNode lateralView) { - ArrayList lateralViews = aliasToLateralViews.get(alias); + List lateralViews = aliasToLateralViews.get(alias); if (lateralViews == null) { lateralViews = new ArrayList(); aliasToLateralViews.put(alias, lateralViews); @@ -620,23 +618,23 @@ public TableSpec getTableSpec() { return tableSpecs.get(tName.next()); } - public HashMap> getDestToLimit() { + public Map> getDestToLimit() { return destToLimit; } - public LinkedHashMap> getDestToAggregationExprs() { + public Map> getDestToAggregationExprs() { return destToAggregationExprs; } - public HashMap> getDestToDistinctFuncExprs() { + public Map> getDestToDistinctFuncExprs() { return destToDistinctFuncExprs; } - public HashMap getNameToSample() { + public Map getNameToSample() { return nameToSample; } - public HashMap getDestToLateralView() { + public Map getDestToLateralView() { return destToLateralView; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java index 55d94f914c..a4e2fe600f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java @@ -39,7 +39,7 @@ public class RowResolver implements Serializable{ private static final long serialVersionUID = 1L; private RowSchema rowSchema; - private LinkedHashMap> rslvMap; + private Map> rslvMap; private HashMap invRslvMap; /* @@ -49,8 +49,8 @@ * invRslvMap. 
*/ private final Map altInvRslvMap; - private Map expressionMap; - private LinkedHashMap> ambiguousColumns; + private Map expressionMap; + private Map> ambiguousColumns; private boolean checkForAmbiguity; // TODO: Refactor this and do in a more object oriented manner @@ -62,12 +62,12 @@ public RowResolver() { rowSchema = new RowSchema(); - rslvMap = new LinkedHashMap>(); + rslvMap = new LinkedHashMap>(); invRslvMap = new HashMap(); altInvRslvMap = new HashMap(); expressionMap = new HashMap(); isExprResolver = false; - ambiguousColumns = new LinkedHashMap>(); + ambiguousColumns = new LinkedHashMap>(); checkForAmbiguity = false; } @@ -117,7 +117,7 @@ public void put(String tab_alias, String col_alias, ColumnInfo colInfo) { private void keepAmbiguousInfo(String col_alias, String tab_alias) { // we keep track of duplicate so that get can check // for ambiguity - LinkedHashMap colAliases = ambiguousColumns.get(tab_alias); + Map colAliases = ambiguousColumns.get(tab_alias); if (colAliases == null) { colAliases = new LinkedHashMap(); ambiguousColumns.put(tab_alias, colAliases); @@ -136,7 +136,7 @@ public boolean addMappingOnly(String tab_alias, String col_alias, ColumnInfo col */ boolean colPresent = invRslvMap.containsKey(colInfo.getInternalName()); - LinkedHashMap f_map = rslvMap.get(tab_alias); + Map f_map = rslvMap.get(tab_alias); if (f_map == null) { f_map = new LinkedHashMap(); rslvMap.put(tab_alias, f_map); @@ -195,7 +195,7 @@ public ColumnInfo get(String tab_alias, String col_alias) throws SemanticExcepti if (tab_alias != null) { tab_alias = tab_alias.toLowerCase(); - HashMap f_map = rslvMap.get(tab_alias); + Map f_map = rslvMap.get(tab_alias); if (f_map == null) { return null; } @@ -203,9 +203,9 @@ public ColumnInfo get(String tab_alias, String col_alias) throws SemanticExcepti } else { boolean found = false; String foundTbl = null; - for (Map.Entry> rslvEntry: rslvMap.entrySet()) { + for (Map.Entry> rslvEntry: rslvMap.entrySet()) { String rslvKey = rslvEntry.getKey(); - LinkedHashMap cmap = rslvEntry.getValue(); + Map cmap = rslvEntry.getValue(); for (Map.Entry cmapEnt : cmap.entrySet()) { if (col_alias.equalsIgnoreCase(cmapEnt.getKey())) { /* @@ -226,7 +226,7 @@ public ColumnInfo get(String tab_alias, String col_alias) throws SemanticExcepti return ret; } - public ArrayList getColumnInfos() { + public List getColumnInfos() { return rowSchema.getSignature(); } @@ -273,7 +273,7 @@ public ColumnInfo get(String tab_alias, String col_alias) throws SemanticExcepti return new ArrayList(columnNames); } - public LinkedHashMap getFieldMap(String tabAlias) { + public Map getFieldMap(String tabAlias) { if (tabAlias == null) { return rslvMap.get(null); } else { @@ -318,11 +318,10 @@ public boolean getIsExprResolver() { public String toString() { StringBuilder sb = new StringBuilder(); - for (Map.Entry> e : rslvMap - .entrySet()) { + for (Map.Entry> e : rslvMap.entrySet()) { String tab = e.getKey(); sb.append(tab + "{"); - HashMap f_map = e.getValue(); + Map f_map = e.getValue(); if (f_map != null) { for (Map.Entry entry : f_map.entrySet()) { sb.append("(" + entry.getKey() + "," + entry.getValue().toString() @@ -338,7 +337,7 @@ public RowSchema getRowSchema() { return rowSchema; } - public LinkedHashMap> getRslvMap() { + public Map> getRslvMap() { return rslvMap; } @@ -514,14 +513,13 @@ private boolean isAmbiguousReference(String tableAlias, String colAlias) { } if(tableAlias != null) { - LinkedHashMap colAliases = ambiguousColumns.get(tableAlias.toLowerCase()); + Map colAliases = 
ambiguousColumns.get(tableAlias.toLowerCase()); if(colAliases != null && colAliases.containsKey(colAlias.toLowerCase())) { return true; } } else { - for (Map.Entry> ambigousColsEntry: ambiguousColumns.entrySet()) { - String rslvKey = ambigousColsEntry.getKey(); - LinkedHashMap cmap = ambigousColsEntry.getValue(); + for (Map.Entry> ambigousColsEntry: ambiguousColumns.entrySet()) { + Map cmap = ambigousColsEntry.getValue(); for (Map.Entry cmapEnt : cmap.entrySet()) { if (colAlias.equalsIgnoreCase(cmapEnt.getKey())) { return true; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index cb99eb31a1..3313766ee4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -42,6 +42,7 @@ import java.util.Queue; import java.util.Set; import java.util.SortedMap; +import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; @@ -312,10 +313,10 @@ /** Marks the temporary table created for a serialized CTE. The table is scoped to the query. */ static final String MATERIALIZATION_MARKER = "$MATERIALIZATION"; - private HashMap opToPartPruner; - private HashMap opToPartList; - protected HashMap topOps; - protected LinkedHashMap, OpParseContext> opParseCtx; + private Map opToPartPruner; + private Map opToPartList; + protected Map topOps; + protected Map, OpParseContext> opParseCtx; private List loadTableWork; private List loadFileWork; private final List columnStatsAutoGatherContexts; @@ -326,21 +327,21 @@ private ASTNode ast; private int destTableId; private UnionProcContext uCtx; - List> listMapJoinOpsNoReducer; - private HashMap opToSamplePruner; + private List> listMapJoinOpsNoReducer; + private Map opToSamplePruner; private final Map> opToPartToSkewedPruner; private Map viewProjectToTableSchema; /** * a map for the split sampling, from alias to an instance of SplitSample * that describes percentage and number. */ - private final HashMap nameToSplitSample; - Map> groupOpToInputTables; - Map prunedPartitions; + private final Map nameToSplitSample; + private Map> groupOpToInputTables; + protected Map prunedPartitions; protected List resultSchema; protected CreateViewDesc createVwDesc; - protected MaterializedViewUpdateDesc materializedViewUpdateDesc; - protected ArrayList viewsExpanded; + private MaterializedViewUpdateDesc materializedViewUpdateDesc; + private List viewsExpanded; protected ASTNode viewSelect; protected final UnparseTranslator unparseTranslator; private final GlobalLimitCtx globalLimitCtx; @@ -360,7 +361,7 @@ private boolean mergeIsDirect; // flag for no scan during analyze ... compute statistics - protected boolean noscan; + private boolean noscan; // whether this is a mv rebuild rewritten expression protected MaterializationRebuildMode mvRebuildMode = MaterializationRebuildMode.NONE; @@ -373,19 +374,19 @@ /* * Capture the CTE definitions in a Query. */ - final Map aliasToCTEs; + protected final Map aliasToCTEs; /* * Used to check recursive CTE invocations. 
Similar to viewsExpanded */ - ArrayList ctesExpanded; + private List ctesExpanded; /* * Whether root tasks after materialized CTE linkage have been resolved */ - boolean rootTasksResolved; + private boolean rootTasksResolved; - protected TableMask tableMask; + private TableMask tableMask; CreateTableDesc tableDesc; @@ -514,7 +515,7 @@ protected void reset(boolean clearCache) { outputs.clear(); } - public void initParseCtx(ParseContext pctx) { + void initParseCtx(ParseContext pctx) { opToPartPruner = pctx.getOpToPartPruner(); opToPartList = pctx.getOpToPartList(); opToSamplePruner = pctx.getOpToSamplePruner(); @@ -551,7 +552,7 @@ public CompilationOpContext getOpContext() { return ctx.getOpContext(); } - public static String genPartValueString(String partColType, String partVal) throws SemanticException { + static String genPartValueString(String partColType, String partVal) throws SemanticException { String returnVal = partVal; if (partColType.equals(serdeConstants.STRING_TYPE_NAME) || partColType.contains(serdeConstants.VARCHAR_TYPE_NAME) || @@ -578,13 +579,13 @@ public static String genPartValueString(String partColType, String partVal) thro return returnVal; } - public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias) + private void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias) throws SemanticException { doPhase1QBExpr(ast, qbexpr, id, alias, false); } @SuppressWarnings("nls") - public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias, boolean insideView) + private void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias, boolean insideView) throws SemanticException { assert (ast.getToken() != null); @@ -635,12 +636,12 @@ public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias, } } - private LinkedHashMap doPhase1GetAggregationsFromSelect( + private Map doPhase1GetAggregationsFromSelect( ASTNode selExpr, QB qb, String dest) throws SemanticException { // Iterate over the selects search for aggregation Trees. // Use String as keys to eliminate duplicate trees. - LinkedHashMap aggregationTrees = new LinkedHashMap(); + Map aggregationTrees = new LinkedHashMap(); List wdwFns = new ArrayList(); for (int i = 0; i < selExpr.getChildCount(); ++i) { ASTNode function = (ASTNode) selExpr.getChild(i); @@ -659,7 +660,7 @@ public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias, spec = new WindowingSpec(); qb.addDestToWindowingSpec(dest, spec); } - HashMap wExprsInDest = qb.getParseInfo().getWindowingExprsForClause(dest); + Map wExprsInDest = qb.getParseInfo().getWindowingExprsForClause(dest); int wColIdx = spec.getWindowExpressions() == null ? 
0 : spec.getWindowExpressions().size(); WindowFunctionSpec wFnSpec = processWindowFunction(wdwFn, (ASTNode)wdwFn.getChild(wdwFn.getChildCount()-1)); @@ -888,7 +889,7 @@ private void doPhase1GetColumnAliasesFromSelect( * @throws SemanticException */ private void doPhase1GetAllAggregations(ASTNode expressionTree, - HashMap aggregations, List wdwFns, + Map aggregations, List wdwFns, ASTNode wndParent) throws SemanticException { int exprTokenType = expressionTree.getToken().getType(); if(exprTokenType == HiveParser.TOK_SUBQUERY_EXPR) { @@ -945,7 +946,7 @@ private void doPhase1GetAllAggregations(ASTNode expressionTree, } private List doPhase1GetDistinctFuncExprs( - HashMap aggregationTrees) throws SemanticException { + Map aggregationTrees) throws SemanticException { List exprs = new ArrayList(); for (Map.Entry entry : aggregationTrees.entrySet()) { ASTNode value = entry.getValue(); @@ -957,7 +958,7 @@ private void doPhase1GetAllAggregations(ASTNode expressionTree, return exprs; } - public static String generateErrorMessage(ASTNode ast, String message) { + static String generateErrorMessage(ASTNode ast, String message) { StringBuilder sb = new StringBuilder(); if (ast == null) { sb.append(message).append(". Cannot tell the position of null AST."); @@ -1003,7 +1004,7 @@ protected void setAST(ASTNode newAST) { return new int[] {aliasIndex, propsIndex, tsampleIndex, ssampleIndex}; } - String findSimpleTableName(ASTNode tabref, int aliasIndex) throws SemanticException { + private String findSimpleTableName(ASTNode tabref, int aliasIndex) throws SemanticException { assert tabref.getType() == HiveParser.TOK_TABREF; ASTNode tableTree = (ASTNode) (tabref.getChild(0)); @@ -1056,7 +1057,7 @@ private String processTable(QB qb, ASTNode tabref) throws SemanticException { } if (tsampleIndex >= 0) { ASTNode sampleClause = (ASTNode) tabref.getChild(tsampleIndex); - ArrayList sampleCols = new ArrayList(); + List sampleCols = new ArrayList(); if (sampleClause.getChildCount() > 2) { for (int i = 2; i < sampleClause.getChildCount(); i++) { sampleCols.add((ASTNode) sampleClause.getChild(i)); @@ -1267,8 +1268,8 @@ private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) } @Override - public HashSet getAllInputs() { - HashSet readEntities = new HashSet(getInputs()); + public Set getAllInputs() { + Set readEntities = new HashSet(getInputs()); for (CTEClause cte : rootClause.asExecutionOrder()) { if (cte.source != null) { readEntities.addAll(cte.source.getInputs()); @@ -1278,8 +1279,8 @@ private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) } @Override - public HashSet getAllOutputs() { - HashSet writeEntities = new HashSet(getOutputs()); + public Set getAllOutputs() { + Set writeEntities = new HashSet(getOutputs()); for (CTEClause cte : rootClause.asExecutionOrder()) { if (cte.source != null) { writeEntities.addAll(cte.source.getOutputs()); @@ -1544,7 +1545,7 @@ private String processLateralView(QB qb, ASTNode lateralView) * @throws SemanticException */ @SuppressWarnings({"fallthrough", "nls"}) - public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plannerCtx) + boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plannerCtx) throws SemanticException { boolean phase1Result = true; @@ -1570,8 +1571,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan queryProperties.setUsesScript(true); } - LinkedHashMap aggregations = doPhase1GetAggregationsFromSelect(ast, - qb, ctx_1.dest); + Map aggregations = 
doPhase1GetAggregationsFromSelect(ast, qb, ctx_1.dest); doPhase1GetColumnAliasesFromSelect(ast, qbp, ctx_1.dest); qbp.setAggregationExprsForClause(ctx_1.dest, aggregations); qbp.setDistinctFuncExprsForClause(ctx_1.dest, @@ -1807,7 +1807,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan Tree partitions = tab.getChild(1); int childCount = partitions.getChildCount(); - HashMap partition = new HashMap(); + Map partition = new HashMap(); for (int i = 0; i < childCount; i++) { String partitionName = partitions.getChild(i).getChild(0).getText(); // Convert to lowercase for the comparison @@ -1987,7 +1987,7 @@ private void handleInsertStatementSpecPhase1(ASTNode ast, QBParseInfo qbp, Phase } } - public void getMaterializationMetadata(QB qb) throws SemanticException { + private void getMaterializationMetadata(QB qb) throws SemanticException { try { gatherCTEReferences(qb, rootClause); int threshold = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_CTE_MATERIALIZE_THRESHOLD); @@ -2047,11 +2047,11 @@ private void gatherCTEReferences(QB qb, CTEClause current) throws HiveException } } - public void getMetaData(QB qb) throws SemanticException { + void getMetaData(QB qb) throws SemanticException { getMetaData(qb, false); } - public void getMetaData(QB qb, boolean enableMaterialization) throws SemanticException { + private void getMetaData(QB qb, boolean enableMaterialization) throws SemanticException { try { if (enableMaterialization) { getMaterializationMetadata(qb); @@ -2666,8 +2666,8 @@ private String findAlias(ASTNode columnRef, @SuppressWarnings("nls") void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn, - ArrayList leftAliases, ArrayList rightAliases, - ArrayList fields, + List leftAliases, List rightAliases, + List fields, Map aliasToOpInfo) throws SemanticException { // String[] allAliases = joinTree.getAllAliases(); switch (condn.getToken().getType()) { @@ -2749,7 +2749,7 @@ void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn, leftAliases, rightAliases, null, aliasToOpInfo); } else if (condn.getChildCount() == 2) { - ArrayList fields1 = null; + List fields1 = null; // if it is a dot operator, remember the field name of the rhs of the // left semijoin if (joinTree.getNoSemiJoin() == false @@ -2959,7 +2959,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, List> filters = joinTree.getFilters(); + List> filters = joinTree.getFilters(); if (type == JoinType.LEFTOUTER || type == JoinType.FULLOUTER) { joinTree.addFilterMapping(cond.getLeft(), cond.getRight(), filters.get(0).size()); } @@ -3013,14 +3013,14 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, case HiveParser.EQUAL_NS: case HiveParser.EQUAL: ASTNode leftCondn = (ASTNode) joinCond.getChild(0); - ArrayList leftCondAl1 = new ArrayList(); - ArrayList leftCondAl2 = new ArrayList(); + List leftCondAl1 = new ArrayList(); + List leftCondAl2 = new ArrayList(); parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2, null, aliasToOpInfo); ASTNode rightCondn = (ASTNode) joinCond.getChild(1); - ArrayList rightCondAl1 = new ArrayList(); - ArrayList rightCondAl2 = new ArrayList(); + List rightCondAl1 = new ArrayList(); + List rightCondAl2 = new ArrayList(); parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1, rightCondAl2, null, aliasToOpInfo); @@ -3046,13 +3046,11 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, // Create all children int childrenBegin = (isFunction ? 
1 : 0); - ArrayList> leftAlias = new ArrayList>( - joinCond.getChildCount() - childrenBegin); - ArrayList> rightAlias = new ArrayList>( - joinCond.getChildCount() - childrenBegin); + List> leftAlias = new ArrayList>(joinCond.getChildCount() - childrenBegin); + List> rightAlias = new ArrayList>(joinCond.getChildCount() - childrenBegin); for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) { - ArrayList left = new ArrayList(); - ArrayList right = new ArrayList(); + List left = new ArrayList(); + List right = new ArrayList(); leftAlias.add(left); rightAlias.add(right); } @@ -3064,7 +3062,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, } boolean leftAliasNull = true; - for (ArrayList left : leftAlias) { + for (List left : leftAlias) { if (left.size() != 0) { leftAliasNull = false; break; @@ -3072,7 +3070,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, } boolean rightAliasNull = true; - for (ArrayList right : rightAlias) { + for (List right : rightAlias) { if (right.size() != 0) { rightAliasNull = false; break; @@ -3118,19 +3116,18 @@ private void extractJoinCondsFromWhereClause(QBJoinTree joinTree, QB qb, String case HiveParser.EQUAL: ASTNode leftCondn = (ASTNode) predicate.getChild(0); - ArrayList leftCondAl1 = new ArrayList(); - ArrayList leftCondAl2 = new ArrayList(); + List leftCondAl1 = new ArrayList(); + List leftCondAl2 = new ArrayList(); try { - parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2, - null, aliasToOpInfo); + parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2, null, aliasToOpInfo); } catch(SemanticException se) { // suppress here; if it is a real issue will get caught in where clause handling. return; } ASTNode rightCondn = (ASTNode) predicate.getChild(1); - ArrayList rightCondAl1 = new ArrayList(); - ArrayList rightCondAl2 = new ArrayList(); + List rightCondAl1 = new ArrayList(); + List rightCondAl2 = new ArrayList(); try { parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1, rightCondAl2, null, aliasToOpInfo); @@ -3171,7 +3168,7 @@ private void extractJoinCondsFromWhereClause(QBJoinTree joinTree, QB qb, String } @SuppressWarnings("nls") - public Operator putOpInsertMap(Operator op, + Operator putOpInsertMap(Operator op, RowResolver rr) { OpParseContext ctx = new OpParseContext(rr); opParseCtx.put(op, ctx); @@ -3573,7 +3570,7 @@ private Operator genNotNullFilterForJoinSourcePlan(QB qb, Operator input, @SuppressWarnings("nls") // TODO: make aliases unique, otherwise needless rewriting takes place Integer genColListRegex(String colRegex, String tabAlias, ASTNode sel, - ArrayList col_list, HashSet excludeCols, RowResolver input, + List col_list, Set excludeCols, RowResolver input, RowResolver colSrcRR, Integer pos, RowResolver output, List aliases, boolean ensureUniqueCols) throws SemanticException { @@ -3614,9 +3611,9 @@ Integer genColListRegex(String colRegex, String tabAlias, ASTNode sel, // We got using() clause in previous join. Need to generate select list as // per standard. For * we will have joining columns first non-repeated // followed by other columns. 
- HashMap leftMap = colSrcRR.getFieldMap(colSrcRR.getNamedJoinInfo().getAliases().get(0)); - HashMap rightMap = colSrcRR.getFieldMap(colSrcRR.getNamedJoinInfo().getAliases().get(1)); - HashMap chosenMap = null; + Map leftMap = colSrcRR.getFieldMap(colSrcRR.getNamedJoinInfo().getAliases().get(0)); + Map rightMap = colSrcRR.getFieldMap(colSrcRR.getNamedJoinInfo().getAliases().get(1)); + Map chosenMap = null; if (colSrcRR.getNamedJoinInfo().getHiveJoinType() != JoinType.RIGHTOUTER) { chosenMap = leftMap; } else { @@ -3673,7 +3670,7 @@ Integer genColListRegex(String colRegex, String tabAlias, ASTNode sel, } } for (String alias : aliases) { - HashMap fMap = colSrcRR.getFieldMap(alias); + Map fMap = colSrcRR.getFieldMap(alias); if (fMap == null) { continue; } @@ -3791,7 +3788,7 @@ private String getScriptArgs(String cmd) { return (end == -1) ? "" : cmd.substring(end, cmd.length()); } - static int getPositionFromInternalName(String internalName) { + private static int getPositionFromInternalName(String internalName) { return HiveConf.getPositionFromInternalName(internalName); } @@ -3905,7 +3902,7 @@ private void failIfColAliasExists(Set nameSet, String name) private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) throws SemanticException { // If there is no "AS" clause, the output schema will be "key,value" - ArrayList outputCols = new ArrayList(); + List outputCols = new ArrayList(); int inputSerDeNum = 1, inputRecordWriterNum = 2; int outputSerDeNum = 4, outputRecordReaderNum = 5; int outputColsNum = 6; @@ -3988,8 +3985,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) StringBuilder inpColumns = new StringBuilder(); StringBuilder inpColumnTypes = new StringBuilder(); - ArrayList inputSchema = opParseCtx.get(input).getRowResolver() - .getColumnInfos(); + List inputSchema = opParseCtx.get(input).getRowResolver().getColumnInfos(); for (int i = 0; i < inputSchema.size(); ++i) { if (i != 0) { inpColumns.append(","); @@ -4119,7 +4115,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) } } - protected List getGroupingSetsForRollup(int size) { + private List getGroupingSetsForRollup(int size) { List groupingSetKeys = new ArrayList(); for (int i = 0; i <= size; i++) { groupingSetKeys.add((1L << i) - 1); @@ -4127,7 +4123,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) return groupingSetKeys; } - protected List getGroupingSetsForCube(int size) { + private List getGroupingSetsForCube(int size) { long count = 1L << size; List results = new ArrayList(); for (long i = 0; i < count; ++i) { @@ -4159,7 +4155,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) return Pair.of(groupByExprs, groupingSets); } - protected List getGroupingSets(List groupByExpr, QBParseInfo parseInfo, + private List getGroupingSets(List groupByExpr, QBParseInfo parseInfo, String dest) throws SemanticException { Map exprPos = new HashMap(); for (int i = 0; i < groupByExpr.size(); ++i) { @@ -4209,7 +4205,7 @@ public static long setBit(long bitmap, int bitIdx) { return bitmap | (1L << bitIdx); } - public static long unsetBit(long bitmap, int bitIdx) { + private static long unsetBit(long bitmap, int bitIdx) { return bitmap & ~(1L << bitIdx); } @@ -4267,7 +4263,7 @@ protected boolean isSelectDistinct(ASTNode expr) { return expr.getType() == HiveParser.TOK_SELECTDI; } - protected boolean isAggregateInSelect(Node node, Collection aggregateFunction) { + private boolean isAggregateInSelect(Node node, Collection aggregateFunction) { if 
(node.getChildren() == null) { return false; } @@ -4392,7 +4388,7 @@ static boolean isRegex(String pattern, HiveConf conf) { LOG.debug("tree: " + selExprList.toStringTree()); } - ArrayList col_list = new ArrayList(); + List col_list = new ArrayList(); RowResolver out_rwsch = new RowResolver(); ASTNode trfm = null; Integer pos = Integer.valueOf(0); @@ -4423,7 +4419,7 @@ static boolean isRegex(String pattern, HiveConf conf) { // the lack of a special token. boolean isUDTF = false; String udtfTableAlias = null; - ArrayList udtfColAliases = new ArrayList(); + List udtfColAliases = new ArrayList(); ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0); GenericUDTF genericUDTF = null; @@ -4631,7 +4627,7 @@ static boolean isRegex(String pattern, HiveConf conf) { out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, qb, selExprList); - ArrayList columnNames = new ArrayList(); + List columnNames = new ArrayList(); Map colExprMap = new HashMap(); for (int i = 0; i < col_list.size(); i++) { String outputCol = getColumnInternalName(i); @@ -4649,8 +4645,7 @@ static boolean isRegex(String pattern, HiveConf conf) { } if (isUDTF) { - output = genUDTFPlan(genericUDTF, udtfTableAlias, udtfColAliases, qb, - output, outerLV); + output = genUDTFPlan(genericUDTF, udtfTableAlias, udtfColAliases, qb, output, outerLV); } if (LOG.isDebugEnabled()) { @@ -4661,7 +4656,7 @@ static boolean isRegex(String pattern, HiveConf conf) { private RowResolver getColForInsertStmtSpec(Map targetCol2Projection, final Table target, Map targetCol2ColumnInfo, int colListPos, - List targetTableColTypes, ArrayList new_col_list, + List targetTableColTypes, List new_col_list, List targetTableColNames) throws SemanticException { RowResolver newOutputRR = new RowResolver(); @@ -4728,7 +4723,7 @@ private RowResolver getColForInsertStmtSpec(Map targetCol2 * @see #handleInsertStatementSpecPhase1(ASTNode, QBParseInfo, org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.Phase1Ctx) * @throws SemanticException */ - public RowResolver handleInsertStatementSpec(List col_list, String dest, + RowResolver handleInsertStatementSpec(List col_list, String dest, RowResolver outputRR, QB qb, ASTNode selExprList) throws SemanticException { //(z,x) @@ -4760,7 +4755,7 @@ public RowResolver handleInsertStatementSpec(List col_list, String throw new SemanticException(generateErrorMessage(selExprList, "No table/partition found in QB metadata for dest='" + dest + "'")); } - ArrayList new_col_list = new ArrayList(); + List new_col_list = new ArrayList(); colListPos = 0; List targetTableCols = target != null ? target.getCols() : partition.getCols(); List targetTableColNames = new ArrayList(); @@ -4814,7 +4809,7 @@ boolean autogenColAliasPrfxIncludeFuncName() { * Class to store GenericUDAF related information. */ public static class GenericUDAFInfo { - public ArrayList convertedParameters; + public List convertedParameters; public GenericUDAFEvaluator genericUDAFEvaluator; public TypeInfo returnType; } @@ -4822,8 +4817,8 @@ boolean autogenColAliasPrfxIncludeFuncName() { /** * Convert exprNodeDesc array to ObjectInspector array. */ - static ArrayList getWritableObjectInspector(ArrayList exprs) { - ArrayList result = new ArrayList(); + static List getWritableObjectInspector(List exprs) { + List result = new ArrayList(); for (ExprNodeDesc expr : exprs) { result.add(expr.getWritableObjectInspector()); } @@ -4835,11 +4830,10 @@ boolean autogenColAliasPrfxIncludeFuncName() { * for each GroupBy aggregation. 
*/ public static GenericUDAFEvaluator getGenericUDAFEvaluator(String aggName, - ArrayList aggParameters, ASTNode aggTree, + List aggParameters, ASTNode aggTree, boolean isDistinct, boolean isAllColumns) throws SemanticException { - ArrayList originalParameterTypeInfos = - getWritableObjectInspector(aggParameters); + List originalParameterTypeInfos = getWritableObjectInspector(aggParameters); GenericUDAFEvaluator result = FunctionRegistry.getGenericUDAFEvaluator( aggName, originalParameterTypeInfos, isDistinct, isAllColumns); if (null == result) { @@ -4863,7 +4857,7 @@ public static GenericUDAFEvaluator getGenericUDAFEvaluator(String aggName, * when the UDAF is not found or has problems. */ public static GenericUDAFInfo getGenericUDAFInfo(GenericUDAFEvaluator evaluator, - GenericUDAFEvaluator.Mode emode, ArrayList aggParameters) + GenericUDAFEvaluator.Mode emode, List aggParameters) throws SemanticException { GenericUDAFInfo r = new GenericUDAFInfo(); @@ -4874,7 +4868,7 @@ public static GenericUDAFInfo getGenericUDAFInfo(GenericUDAFEvaluator evaluator, // set r.returnType ObjectInspector returnOI = null; try { - ArrayList aggOIs = getWritableObjectInspector(aggParameters); + List aggOIs = getWritableObjectInspector(aggParameters); ObjectInspector[] aggOIArray = new ObjectInspector[aggOIs.size()]; for (int ii = 0; ii < aggOIs.size(); ++ii) { aggOIArray[ii] = aggOIs.get(ii); @@ -4980,9 +4974,9 @@ private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo, .get(input).getRowResolver(); RowResolver groupByOutputRowResolver = new RowResolver(); groupByOutputRowResolver.setIsExprResolver(true); - ArrayList groupByKeys = new ArrayList(); - ArrayList aggregations = new ArrayList(); - ArrayList outputColumnNames = new ArrayList(); + List groupByKeys = new ArrayList(); + List aggregations = new ArrayList(); + List outputColumnNames = new ArrayList(); Map colExprMap = new HashMap(); List grpByExprs = getGroupByForClause(parseInfo, dest); for (int i = 0; i < grpByExprs.size(); ++i) { @@ -5004,7 +4998,7 @@ private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo, colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1)); } // For each aggregation - HashMap aggregationTrees = parseInfo + Map aggregationTrees = parseInfo .getAggregationExprsForClause(dest); assert (aggregationTrees != null); // get the last colName for the reduce KEY @@ -5025,7 +5019,7 @@ private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo, boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR; // Convert children to aggParameters - ArrayList aggParameters = new ArrayList(); + List aggParameters = new ArrayList(); // 0 is the function name for (int i = 1; i < value.getChildCount(); i++) { ASTNode paraExpr = (ASTNode) value.getChild(i); @@ -5067,8 +5061,7 @@ private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo, GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator( aggName, aggParameters, value, isDistinct, isAllColumns); assert (genericUDAFEvaluator != null); - GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode, - aggParameters); + GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters); aggregations.add(new AggregationDesc(aggName.toLowerCase(), udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct, amode)); @@ -5188,13 +5181,13 @@ private Operator genGroupByPlanGroupByOperator1(QBParseInfo parseInfo, List groupingSets, boolean groupingSetsPresent, boolean groupingSetsNeedAdditionalMRJob) 
throws SemanticException { - ArrayList outputColumnNames = new ArrayList(); + List outputColumnNames = new ArrayList(); RowResolver groupByInputRowResolver = opParseCtx .get(reduceSinkOperatorInfo).getRowResolver(); RowResolver groupByOutputRowResolver = new RowResolver(); groupByOutputRowResolver.setIsExprResolver(true); - ArrayList groupByKeys = new ArrayList(); - ArrayList aggregations = new ArrayList(); + List groupByKeys = new ArrayList(); + List aggregations = new ArrayList(); List grpByExprs = getGroupByForClause(parseInfo, dest); Map colExprMap = new HashMap(); for (int i = 0; i < grpByExprs.size(); ++i) { @@ -5250,7 +5243,7 @@ private Operator genGroupByPlanGroupByOperator1(QBParseInfo parseInfo, } } - HashMap aggregationTrees = parseInfo + Map aggregationTrees = parseInfo .getAggregationExprsForClause(dest); // get the last colName for the reduce KEY // it represents the column name corresponding to distinct aggr, if any @@ -5269,7 +5262,7 @@ private Operator genGroupByPlanGroupByOperator1(QBParseInfo parseInfo, for (Map.Entry entry : aggregationTrees.entrySet()) { ASTNode value = entry.getValue(); String aggName = unescapeIdentifier(value.getChild(0).getText()); - ArrayList aggParameters = new ArrayList(); + List aggParameters = new ArrayList(); boolean isDistinct = (value.getType() == HiveParser.TOK_FUNCTIONDI); containsDistinctAggr = containsDistinctAggr || isDistinct; @@ -5422,9 +5415,9 @@ private Operator genGroupByPlanMapGroupByOperator(QB qb, QBParseInfo parseInfo = qb.getParseInfo(); RowResolver groupByOutputRowResolver = new RowResolver(); groupByOutputRowResolver.setIsExprResolver(true); - ArrayList groupByKeys = new ArrayList(); - ArrayList outputColumnNames = new ArrayList(); - ArrayList aggregations = new ArrayList(); + List groupByKeys = new ArrayList(); + List outputColumnNames = new ArrayList(); + List aggregations = new ArrayList(); Map colExprMap = new HashMap(); for (int i = 0; i < grpByExprs.size(); ++i) { ASTNode grpbyExpr = grpByExprs.get(i); @@ -5486,15 +5479,14 @@ private Operator genGroupByPlanMapGroupByOperator(QB qb, } // For each aggregation - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); + Map aggregationTrees = parseInfo.getAggregationExprsForClause(dest); assert (aggregationTrees != null); boolean containsDistinctAggr = false; for (Map.Entry entry : aggregationTrees.entrySet()) { ASTNode value = entry.getValue(); String aggName = unescapeIdentifier(value.getChild(0).getText()); - ArrayList aggParameters = new ArrayList(); + List aggParameters = new ArrayList(); // 0 is the function name for (int i = 1; i < value.getChildCount(); i++) { ASTNode paraExpr = (ASTNode) value.getChild(i); @@ -5581,7 +5573,7 @@ private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb, List outputKeyColumnNames = new ArrayList(); List outputValueColumnNames = new ArrayList(); - ArrayList reduceKeys = getReduceKeysForReduceSink(grpByExprs, + List reduceKeys = getReduceKeysForReduceSink(grpByExprs, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames, colExprMap); @@ -5607,9 +5599,8 @@ private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb, reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames, colExprMap); - ArrayList reduceValues = new ArrayList(); - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); + List reduceValues = new ArrayList(); + Map aggregationTrees = parseInfo.getAggregationExprsForClause(dest); if (!mapAggrDone) { 
getReduceValuesForReduceSinkNoMapAgg(parseInfo, dest, reduceSinkInputRowResolver, @@ -5649,12 +5640,12 @@ private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb, return rsOp; } - private ArrayList getReduceKeysForReduceSink(List grpByExprs, + private List getReduceKeysForReduceSink(List grpByExprs, RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver, List outputKeyColumnNames, Map colExprMap) throws SemanticException { - ArrayList reduceKeys = new ArrayList(); + List reduceKeys = new ArrayList(); for (int i = 0; i < grpByExprs.size(); ++i) { ASTNode grpbyExpr = grpByExprs.get(i); @@ -5740,10 +5731,9 @@ private boolean isConsistentWithinQuery(ExprNodeDesc expr) throws SemanticExcept private void getReduceValuesForReduceSinkNoMapAgg(QBParseInfo parseInfo, String dest, RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver, - List outputValueColumnNames, ArrayList reduceValues, + List outputValueColumnNames, List reduceValues, Map colExprMap) throws SemanticException { - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); + Map aggregationTrees = parseInfo.getAggregationExprsForClause(dest); // Put parameters to aggregations in reduceValues for (Map.Entry entry : aggregationTrees.entrySet()) { @@ -5787,7 +5777,7 @@ private ReduceSinkOperator genCommonGroupByPlanReduceSinkOperator(QB qb, List outputValueColumnNames = new ArrayList(); List grpByExprs = getGroupByForClause(parseInfo, dest); - ArrayList reduceKeys = getReduceKeysForReduceSink(grpByExprs, + List reduceKeys = getReduceKeysForReduceSink(grpByExprs, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames, colExprMap); @@ -5797,7 +5787,7 @@ private ReduceSinkOperator genCommonGroupByPlanReduceSinkOperator(QB qb, List reduceValues = new ArrayList(); + List reduceValues = new ArrayList(); // The dests can have different non-distinct aggregations, so we have to iterate over all of // them @@ -5905,8 +5895,8 @@ private Operator genGroupByPlanReduceSinkOperator2MR(QBParseInfo parseInfo, RowResolver reduceSinkOutputRowResolver2 = new RowResolver(); reduceSinkOutputRowResolver2.setIsExprResolver(true); Map colExprMap = new HashMap(); - ArrayList reduceKeys = new ArrayList(); - ArrayList outputColumnNames = new ArrayList(); + List reduceKeys = new ArrayList(); + List outputColumnNames = new ArrayList(); // Get group-by keys and store in reduceKeys List grpByExprs = getGroupByForClause(parseInfo, dest); for (int i = 0; i < grpByExprs.size(); ++i) { @@ -5937,10 +5927,9 @@ private Operator genGroupByPlanReduceSinkOperator2MR(QBParseInfo parseInfo, } // Get partial aggregation results and store in reduceValues - ArrayList reduceValues = new ArrayList(); + List reduceValues = new ArrayList(); int inputField = reduceKeys.size(); - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); + Map aggregationTrees = parseInfo.getAggregationExprsForClause(dest); for (Map.Entry entry : aggregationTrees.entrySet()) { String field = getColumnInternalName(inputField); ASTNode t = entry.getValue(); @@ -5993,11 +5982,11 @@ private Operator genGroupByPlanGroupByOperator2MR(QBParseInfo parseInfo, reduceSinkOperatorInfo2).getRowResolver(); RowResolver groupByOutputRowResolver2 = new RowResolver(); groupByOutputRowResolver2.setIsExprResolver(true); - ArrayList groupByKeys = new ArrayList(); - ArrayList aggregations = new ArrayList(); + List groupByKeys = new ArrayList(); + List aggregations = new ArrayList(); Map colExprMap = new 
HashMap(); List grpByExprs = getGroupByForClause(parseInfo, dest); - ArrayList outputColumnNames = new ArrayList(); + List outputColumnNames = new ArrayList(); for (int i = 0; i < grpByExprs.size(); ++i) { ASTNode grpbyExpr = grpByExprs.get(i); ColumnInfo exprInfo = groupByInputRowResolver2.getExpression(grpbyExpr); @@ -6029,11 +6018,10 @@ private Operator genGroupByPlanGroupByOperator2MR(QBParseInfo parseInfo, colExprMap); } - HashMap aggregationTrees = parseInfo - .getAggregationExprsForClause(dest); + Map aggregationTrees = parseInfo.getAggregationExprsForClause(dest); boolean containsDistinctAggr = false; for (Map.Entry entry : aggregationTrees.entrySet()) { - ArrayList aggParameters = new ArrayList(); + List aggParameters = new ArrayList(); ASTNode value = entry.getValue(); ColumnInfo paraExprInfo = groupByInputRowResolver2.getExpression(value); if (paraExprInfo == null) { @@ -6768,7 +6756,7 @@ private int getReducersBucketing(int totalFiles, int maxReducers) { } private static class SortBucketRSCtx { - ArrayList partnCols; + List partnCols; boolean multiFileSpray; int numFiles; int totalFiles; @@ -6783,7 +6771,7 @@ public SortBucketRSCtx() { /** * @return the partnCols */ - public ArrayList getPartnCols() { + public List getPartnCols() { return partnCols; } @@ -6791,7 +6779,7 @@ public SortBucketRSCtx() { * @param partnCols * the partnCols to set */ - public void setPartnCols(ArrayList partnCols) { + public void setPartnCols(List partnCols) { this.partnCols = partnCols; } @@ -6852,9 +6840,9 @@ private Operator genBucketingSortingDest(String dest, Operator input, QB qb, // spray the data into multiple buckets. That way, we can support a very large // number of buckets without needing a very large number of reducers. boolean enforceBucketing = false; - ArrayList partnCols = new ArrayList<>(); - ArrayList sortCols = new ArrayList<>(); - ArrayList sortOrders = new ArrayList<>(); + List partnCols = new ArrayList<>(); + List sortCols = new ArrayList<>(); + List sortOrders = new ArrayList<>(); boolean multiFileSpray = false; int numFiles = 1; int totalFiles = 1; @@ -6935,7 +6923,7 @@ private Operator genBucketingSortingDest(String dest, Operator input, QB qb, private void genPartnCols(String dest, Operator input, QB qb, TableDesc table_desc, Table dest_tab, SortBucketRSCtx ctx) throws SemanticException { boolean enforceBucketing = false; - ArrayList partnColsNoConvert = new ArrayList(); + List partnColsNoConvert = new ArrayList(); if ((dest_tab.getNumBuckets() > 0)) { enforceBucketing = true; @@ -6995,13 +6983,13 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L Set distributeKeys = distributeColInfos.stream() .map(ColumnInfo::getInternalName) .collect(Collectors.toSet()); - ArrayList keyCols = new ArrayList<>(); - ArrayList keyColNames = new ArrayList<>(); + List keyCols = new ArrayList<>(); + List keyColNames = new ArrayList<>(); StringBuilder order = new StringBuilder(); StringBuilder nullOrder = new StringBuilder(); - ArrayList valCols = new ArrayList<>(); - ArrayList valColNames = new ArrayList<>(); - ArrayList partCols = new ArrayList<>(); + List valCols = new ArrayList<>(); + List valColNames = new ArrayList<>(); + List partCols = new ArrayList<>(); Map colExprMap = new HashMap<>(); Map nameMapping = new HashMap<>(); // map _col0 to KEY._col0, etc @@ -7043,7 +7031,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L keyColNames, distinctColumnIndices, valColNames, -1, partCols, -1, keyTable, valueTable, Operation.NOT_ACID); RowResolver rsRR 
= new RowResolver(); - ArrayList rsSignature = new ArrayList<>(); + List rsSignature = new ArrayList<>(); for (int index = 0; index < input.getSchema().getSignature().size(); index++) { ColumnInfo colInfo = new ColumnInfo(input.getSchema().getSignature().get(index)); String[] nm = inputRR.reverseLookup(colInfo.getInternalName()); @@ -7061,7 +7049,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L // Create SEL operator RowResolver selRR = new RowResolver(); - ArrayList selSignature = new ArrayList<>(); + List selSignature = new ArrayList<>(); List columnExprs = new ArrayList<>(); List colNames = new ArrayList<>(); Map selColExprMap = new HashMap<>(); @@ -7087,8 +7075,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L selColExprMap.put(colName, exprNodeDesc); } SelectDesc selConf = new SelectDesc(columnExprs, colNames); - result = putOpInsertMap(OperatorFactory.getAndMakeChild( - selConf, new RowSchema(selSignature), result), selRR); + result = putOpInsertMap(OperatorFactory.getAndMakeChild(selConf, new RowSchema(selSignature), result), selRR); result.setColumnExprMap(selColExprMap); return result; @@ -7097,7 +7084,7 @@ private Operator genMaterializedViewDataOrgPlan(List sortColInfos, L private void setStatsForNonNativeTable(String dbName, String tableName) throws SemanticException { String qTableName = DDLSemanticAnalyzer.getDotName(new String[] { dbName, tableName }); - HashMap mapProp = new HashMap<>(); + Map mapProp = new HashMap<>(); mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); AlterTableUnsetPropertiesDesc alterTblDesc = new AlterTableUnsetPropertiesDesc(qTableName, null, null, false, mapProp, false, null); @@ -7892,7 +7879,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) inputRR = opParseCtx.get(input).getRowResolver(); - ArrayList vecCol = new ArrayList(); + List vecCol = new ArrayList(); if (updating(dest) || deleting(dest)) { vecCol.add(new ColumnInfo(VirtualColumn.ROWID.getName(), VirtualColumn.ROWID.getTypeInfo(), @@ -8431,7 +8418,7 @@ These props are now enabled elsewhere (see commit diffs). It would be better in * Generate the conversion SelectOperator that converts the columns into the * types that are expected by the table_desc. */ - Operator genConversionSelectOperator(String dest, QB qb, Operator input, + private Operator genConversionSelectOperator(String dest, QB qb, Operator input, TableDesc table_desc, DynamicPartitionCtx dpCtx) throws SemanticException { StructObjectInspector oi = null; try { @@ -8446,8 +8433,7 @@ Operator genConversionSelectOperator(String dest, QB qb, Operator input, // Check column number List tableFields = oi.getAllStructFieldRefs(); boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING); - ArrayList rowFields = opParseCtx.get(input).getRowResolver() - .getColumnInfos(); + List rowFields = opParseCtx.get(input).getRowResolver().getColumnInfos(); int inColumnCnt = rowFields.size(); int outColumnCnt = tableFields.size(); if (dynPart && dpCtx != null) { @@ -8465,8 +8451,7 @@ Operator genConversionSelectOperator(String dest, QB qb, Operator input, // Check column types boolean converted = false; int columnNumber = tableFields.size(); - ArrayList expressions = new ArrayList( - columnNumber); + List expressions = new ArrayList(columnNumber); // MetadataTypedColumnsetSerDe does not need type conversions because it // does the conversion to String by itself. 
@@ -8533,7 +8518,7 @@ Operator genConversionSelectOperator(String dest, QB qb, Operator input, if (converted) { // add the select operator RowResolver rowResolver = new RowResolver(); - ArrayList colNames = new ArrayList(); + List colNames = new ArrayList(); Map colExprMap = new HashMap(); for (int i = 0; i < expressions.size(); i++) { String name = getColumnInternalName(i); @@ -8577,9 +8562,8 @@ private Operator genLimitPlan(String dest, QB qb, Operator input, int offset, in return limitMap; } - private Operator genUDTFPlan(GenericUDTF genericUDTF, - String outputTableAlias, ArrayList colAliases, QB qb, - Operator input, boolean outerLV) throws SemanticException { + private Operator genUDTFPlan(GenericUDTF genericUDTF, String outputTableAlias, List colAliases, QB qb, + Operator input, boolean outerLV) throws SemanticException { // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY QBParseInfo qbp = qb.getParseInfo(); @@ -8610,10 +8594,10 @@ private Operator genUDTFPlan(GenericUDTF genericUDTF, // resulting output object inspector can be used to make the RowResolver // for the UDTF operator RowResolver selectRR = opParseCtx.get(input).getRowResolver(); - ArrayList inputCols = selectRR.getColumnInfos(); + List inputCols = selectRR.getColumnInfos(); // Create the object inspector for the input columns and initialize the UDTF - ArrayList colNames = new ArrayList(); + List colNames = new ArrayList(); ObjectInspector[] colOIs = new ObjectInspector[inputCols.size()]; for (int i = 0; i < inputCols.size(); i++) { colNames.add(inputCols.get(i).getInternalName()); @@ -8640,7 +8624,7 @@ private Operator genUDTFPlan(GenericUDTF genericUDTF, } // Generate the output column info's / row resolver using internal names. - ArrayList udtfCols = new ArrayList(); + List udtfCols = new ArrayList(); Iterator colAliasesIter = colAliases.iterator(); for (StructField sf : outputOI.getAllStructFieldRefs()) { @@ -8691,8 +8675,8 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, return genLimitPlan(dest, qb, curr, offset, limit); } - private ArrayList getPartitionColsFromBucketCols(String dest, QB qb, Table tab, - TableDesc table_desc, Operator input, boolean convert) + private List getPartitionColsFromBucketCols(String dest, QB qb, Table tab, TableDesc table_desc, + Operator input, boolean convert) throws SemanticException { List tabBucketCols = tab.getBucketCols(); List tabCols = tab.getCols(); @@ -8716,7 +8700,7 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, // We have to set up the bucketing columns differently for update and deletes, // as it is always using the ROW__ID column. 
- private ArrayList getPartitionColsFromBucketColsForUpdateDelete( + private List getPartitionColsFromBucketColsForUpdateDelete( Operator input, boolean convert) throws SemanticException { //return genConvertCol(dest, qb, tab, table_desc, input, Arrays.asList(0), convert); // In the case of update and delete the bucketing column is always the first column, @@ -8730,14 +8714,13 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, if (convert) { column = ParseUtils.createConversionCast(column, TypeInfoFactory.intTypeInfo); } - ArrayList rlist = new ArrayList(1); + List rlist = new ArrayList(1); rlist.add(column); return rlist; } - private ArrayList genConvertCol(String dest, QB qb, Table tab, - TableDesc table_desc, Operator input, List posns, boolean convert) - throws SemanticException { + private List genConvertCol(String dest, QB qb, Table tab, TableDesc table_desc, Operator input, + List posns, boolean convert) throws SemanticException { StructObjectInspector oi = null; try { Deserializer deserializer = table_desc.getDeserializerClass() @@ -8749,12 +8732,11 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, } List tableFields = oi.getAllStructFieldRefs(); - ArrayList rowFields = opParseCtx.get(input).getRowResolver() - .getColumnInfos(); + List rowFields = opParseCtx.get(input).getRowResolver().getColumnInfos(); // Check column type int columnNumber = posns.size(); - ArrayList expressions = new ArrayList(columnNumber); + List expressions = new ArrayList(columnNumber); for (Integer posn : posns) { ObjectInspector tableFieldOI = tableFields.get(posn).getFieldObjectInspector(); TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI); @@ -8785,7 +8767,7 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, return expressions; } - private ArrayList getSortCols(String dest, QB qb, Table tab, TableDesc table_desc, + private List getSortCols(String dest, QB qb, Table tab, TableDesc table_desc, Operator input, boolean convert) throws SemanticException { List tabSortCols = tab.getSortCols(); @@ -8807,12 +8789,12 @@ private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, return genConvertCol(dest, qb, tab, table_desc, input, posns, convert); } - private ArrayList getSortOrders(String dest, QB qb, Table tab, Operator input) + private List getSortOrders(String dest, QB qb, Table tab, Operator input) throws SemanticException { List tabSortCols = tab.getSortCols(); List tabCols = tab.getCols(); - ArrayList orders = new ArrayList(); + List orders = new ArrayList(); for (Order sortCol : tabSortCols) { for (FieldSchema tabCol : tabCols) { if (sortCol.getCol().equals(tabCol.getName())) { @@ -8836,7 +8818,7 @@ private Operator genReduceSinkPlan(String dest, QB qb, Operator input, if (partitionExprs == null) { partitionExprs = qb.getParseInfo().getDistributeByForClause(dest); } - ArrayList partCols = new ArrayList(); + List partCols = new ArrayList(); if (partitionExprs != null) { int ccount = partitionExprs.getChildCount(); for (int i = 0; i < ccount; ++i) { @@ -8862,7 +8844,7 @@ private Operator genReduceSinkPlan(String dest, QB qb, Operator input, } } } - ArrayList sortCols = new ArrayList(); + List sortCols = new ArrayList(); StringBuilder order = new StringBuilder(); StringBuilder nullOrder = new StringBuilder(); if (sortExprs != null) { @@ -8923,7 +8905,7 @@ private Operator genReduceSinkPlan(String dest, QB qb, Operator input, } private Operator genReduceSinkPlan(Operator input, - 
ArrayList partitionCols, ArrayList sortCols, + List partitionCols, List sortCols, String sortOrder, String nullOrder, int numReducers, AcidUtils.Operation acidOp) throws SemanticException { return genReduceSinkPlan(input, partitionCols, sortCols, sortOrder, nullOrder, numReducers, @@ -8931,8 +8913,7 @@ private Operator genReduceSinkPlan(Operator input, } @SuppressWarnings("nls") - private Operator genReduceSinkPlan(Operator input, - ArrayList partitionCols, ArrayList sortCols, + private Operator genReduceSinkPlan(Operator input, List partitionCols, List sortCols, String sortOrder, String nullOrder, int numReducers, AcidUtils.Operation acidOp, boolean pullConstants) throws SemanticException { @@ -8941,10 +8922,10 @@ private Operator genReduceSinkPlan(Operator input, Operator dummy = Operator.createDummy(); dummy.setParentOperators(Arrays.asList(input)); - ArrayList newSortCols = new ArrayList(); + List newSortCols = new ArrayList(); StringBuilder newSortOrder = new StringBuilder(); StringBuilder newNullOrder = new StringBuilder(); - ArrayList sortColsBack = new ArrayList(); + List sortColsBack = new ArrayList(); for (int i = 0; i < sortCols.size(); i++) { ExprNodeDesc sortCol = sortCols.get(i); // If we are not pulling constants, OR @@ -8960,13 +8941,13 @@ private Operator genReduceSinkPlan(Operator input, // For the generation of the values expression just get the inputs // signature and generate field expressions for those RowResolver rsRR = new RowResolver(); - ArrayList outputColumns = new ArrayList(); - ArrayList valueCols = new ArrayList(); - ArrayList valueColsBack = new ArrayList(); + List outputColumns = new ArrayList(); + List valueCols = new ArrayList(); + List valueColsBack = new ArrayList(); Map colExprMap = new HashMap(); - ArrayList constantCols = new ArrayList(); + List constantCols = new ArrayList(); - ArrayList columnInfos = inputRR.getColumnInfos(); + List columnInfos = inputRR.getColumnInfos(); int[] index = new int[columnInfos.size()]; for (int i = 0; i < index.length; i++) { @@ -9036,8 +9017,8 @@ private Operator genReduceSinkPlan(Operator input, interim.setColumnExprMap(colExprMap); RowResolver selectRR = new RowResolver(); - ArrayList selCols = new ArrayList(); - ArrayList selOutputCols = new ArrayList(); + List selCols = new ArrayList(); + List selOutputCols = new ArrayList(); Map selColExprMap = new HashMap(); Iterator constants = constantCols.iterator(); @@ -9079,20 +9060,19 @@ private Operator genReduceSinkPlan(Operator input, } private Operator genJoinOperatorChildren(QBJoinTree join, Operator left, - Operator[] right, HashSet omitOpts, ExprNodeDesc[][] joinKeys) throws SemanticException { + Operator[] right, Set omitOpts, ExprNodeDesc[][] joinKeys) throws SemanticException { RowResolver outputRR = new RowResolver(); - ArrayList outputColumnNames = new ArrayList(); + List outputColumnNames = new ArrayList(); // all children are base classes Operator[] rightOps = new Operator[right.length]; int outputPos = 0; Map reversedExprs = new HashMap(); - HashMap> exprMap = new HashMap>(); + Map> exprMap = new HashMap>(); Map colExprMap = new HashMap(); - HashMap> posToAliasMap = new HashMap>(); - HashMap> filterMap = - new HashMap>(); + Map> posToAliasMap = new HashMap>(); + Map> filterMap = new HashMap>(); // Only used for semijoin with residual predicates List topSelectInputColumns = new ArrayList<>(); @@ -9111,8 +9091,8 @@ private Operator genJoinOperatorChildren(QBJoinTree join, Operator left, int[] index = rs.getValueIndex(); - ArrayList valueDesc = new ArrayList(); 
- ArrayList filterDesc = new ArrayList(); + List valueDesc = new ArrayList(); + List filterDesc = new ArrayList(); Byte tag = (byte) rsDesc.getTag(); // check whether this input operator produces output @@ -9267,9 +9247,9 @@ private Operator genJoinReduceSinkChild(QB qb, ExprNodeDesc[] joinKeys, RowResolver inputRR = opParseCtx.get(child).getRowResolver(); RowResolver outputRR = new RowResolver(); - ArrayList outputColumns = new ArrayList(); - ArrayList reduceKeys = new ArrayList(); - ArrayList reduceKeysBack = new ArrayList(); + List outputColumns = new ArrayList(); + List reduceKeys = new ArrayList(); + List reduceKeysBack = new ArrayList(); // Compute join keys and store in reduceKeys for (ExprNodeDesc joinKey : joinKeys) { @@ -9278,8 +9258,8 @@ private Operator genJoinReduceSinkChild(QB qb, ExprNodeDesc[] joinKeys, } // Walk over the input row resolver and copy in the output - ArrayList reduceValues = new ArrayList(); - ArrayList reduceValuesBack = new ArrayList(); + List reduceValues = new ArrayList(); + List reduceValuesBack = new ArrayList(); Map colExprMap = new HashMap(); List columns = inputRR.getColumnInfos(); @@ -9373,7 +9353,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, } if ( joinSrcOp != null ) { - ArrayList filter = joinTree.getFiltersForPushing().get(0); + List filter = joinTree.getFiltersForPushing().get(0); for (ASTNode cond : filter) { joinSrcOp = genFilterPlan(qb, cond, joinSrcOp, false); } @@ -9382,7 +9362,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, String[] baseSrc = joinTree.getBaseSrc(); Operator[] srcOps = new Operator[baseSrc.length]; - HashSet omitOpts = null; // set of input to the join that should be + Set omitOpts = null; // set of input to the join that should be // omitted by the output int pos = 0; for (String src : baseSrc) { @@ -9391,7 +9371,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, // for left-semi join, generate an additional selection & group-by // operator before ReduceSink - ArrayList fields = joinTree.getRHSSemijoinColumns(src); + List fields = joinTree.getRHSSemijoinColumns(src); if (fields != null) { // the RHS table columns should be not be output from the join if (omitOpts == null) { @@ -9404,8 +9384,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, // generate a groupby operator (HASH mode) for a map-side partial // aggregation for semijoin - srcOps[pos++] = genMapGroupByForSemijoin(qb, fields, srcOp, - GroupByDesc.Mode.HASH); + srcOps[pos++] = genMapGroupByForSemijoin(qb, fields, srcOp, GroupByDesc.Mode.HASH); } else { srcOps[pos++] = srcOp; } @@ -9426,8 +9405,7 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, srcOps[i] = genJoinReduceSinkChild(qb, joinKeys[i], srcOps[i], srcs, joinTree.getNextTag()); } - Operator topOp = genJoinOperatorChildren(joinTree, - joinSrcOp, srcOps, omitOpts, joinKeys); + Operator topOp = genJoinOperatorChildren(joinTree, joinSrcOp, srcOps, omitOpts, joinKeys); JoinOperator joinOp; if (topOp instanceof JoinOperator) { joinOp = (JoinOperator) topOp; @@ -9464,12 +9442,12 @@ private Operator genJoinOperator(QB qb, QBJoinTree joinTree, * @return the selection operator. 
* @throws SemanticException */ - private Operator insertSelectForSemijoin(ArrayList fields, + private Operator insertSelectForSemijoin(List fields, Operator input) throws SemanticException { RowResolver inputRR = opParseCtx.get(input).getRowResolver(); - ArrayList colList = new ArrayList(); - ArrayList outputColumnNames = new ArrayList(); + List colList = new ArrayList(); + List outputColumnNames = new ArrayList(); Map colExprMap = new HashMap(); RowResolver outputRR = new RowResolver(); @@ -9520,16 +9498,15 @@ private Operator insertSelectForSemijoin(ArrayList fields, return output; } - private Operator genMapGroupByForSemijoin(QB qb, ArrayList fields, + private Operator genMapGroupByForSemijoin(QB qb, List fields, Operator input, GroupByDesc.Mode mode) throws SemanticException { - RowResolver groupByInputRowResolver = opParseCtx.get(input) - .getRowResolver(); + RowResolver groupByInputRowResolver = opParseCtx.get(input).getRowResolver(); RowResolver groupByOutputRowResolver = new RowResolver(); - ArrayList groupByKeys = new ArrayList(); - ArrayList outputColumnNames = new ArrayList(); - ArrayList aggregations = new ArrayList(); + List groupByKeys = new ArrayList(); + List outputColumnNames = new ArrayList(); + List aggregations = new ArrayList(); Map colExprMap = new HashMap(); for (int i = 0; i < fields.size(); ++i) { @@ -9658,12 +9635,12 @@ private void pushJoinFilters(QB qb, QBJoinTree joinTree, pushJoinFilters(qb, joinTree.getJoinSrc(), map); } } - ArrayList> filters = joinTree.getFiltersForPushing(); + List> filters = joinTree.getFiltersForPushing(); int pos = 0; for (String src : joinTree.getBaseSrc()) { if (src != null) { Operator srcOp = map.get(src); - ArrayList filter = filters.get(pos); + List filter = filters.get(pos); for (ASTNode cond : filter) { srcOp = genFilterPlan(qb, cond, srcOp, false); } @@ -9716,15 +9693,15 @@ private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree, QBJoinTree joinTree = new QBJoinTree(); joinTree.setNoOuterJoin(false); - joinTree.setExpressions(new ArrayList>()); - joinTree.setFilters(new ArrayList>()); - joinTree.setFiltersForPushing(new ArrayList>()); + joinTree.setExpressions(new ArrayList>()); + joinTree.setFilters(new ArrayList>()); + joinTree.setFiltersForPushing(new ArrayList>()); // Create joinTree structures to fill them up later - ArrayList rightAliases = new ArrayList(); - ArrayList leftAliases = new ArrayList(); - ArrayList baseSrc = new ArrayList(); - ArrayList preserved = new ArrayList(); + List rightAliases = new ArrayList(); + List leftAliases = new ArrayList(); + List baseSrc = new ArrayList(); + List preserved = new ArrayList(); boolean lastPreserved = false; int cols = -1; @@ -9750,8 +9727,7 @@ private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree, } else { rightAliases.add(alias); } - joinTree.getAliasToOpInfo().put( - getModifiedAlias(qb, alias), aliasToOpInfo.get(alias)); + joinTree.getAliasToOpInfo().put(getModifiedAlias(qb, alias), aliasToOpInfo.get(alias)); joinTree.setId(qb.getId()); baseSrc.add(alias); @@ -9767,9 +9743,9 @@ private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree, + "number of keys in UNIQUEJOIN"); } - ArrayList expressions = new ArrayList(); - ArrayList filt = new ArrayList(); - ArrayList filters = new ArrayList(); + List expressions = new ArrayList(); + List filt = new ArrayList(); + List filters = new ArrayList(); for (Node exp : child.getChildren()) { expressions.add((ASTNode) exp); @@ -9909,28 +9885,27 @@ private QBJoinTree genSQJoinTree(QB qb, ISubQueryJoinInfo 
subQuery, joinTree.addRHSSemijoin(rightalias); } - ArrayList> expressions = new ArrayList>(); + List> expressions = new ArrayList>(); expressions.add(new ArrayList()); expressions.add(new ArrayList()); joinTree.setExpressions(expressions); - ArrayList nullsafes = new ArrayList(); + List nullsafes = new ArrayList(); joinTree.setNullSafes(nullsafes); - ArrayList> filters = new ArrayList>(); + List> filters = new ArrayList>(); filters.add(new ArrayList()); filters.add(new ArrayList()); joinTree.setFilters(filters); joinTree.setFilterMap(new int[2][]); - ArrayList> filtersForPushing = - new ArrayList>(); + List> filtersForPushing = new ArrayList>(); filtersForPushing.add(new ArrayList()); filtersForPushing.add(new ArrayList()); joinTree.setFiltersForPushing(filtersForPushing); ASTNode joinCond = subQuery.getJoinConditionAST(); - ArrayList leftSrc = new ArrayList(); + List leftSrc = new ArrayList(); parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo); if (leftSrc.size() == 1) { joinTree.setLeftAlias(leftSrc.get(0)); @@ -10045,28 +10020,27 @@ private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, assert false; } - ArrayList> expressions = new ArrayList>(); + List> expressions = new ArrayList>(); expressions.add(new ArrayList()); expressions.add(new ArrayList()); joinTree.setExpressions(expressions); - ArrayList nullsafes = new ArrayList(); + List nullsafes = new ArrayList(); joinTree.setNullSafes(nullsafes); - ArrayList> filters = new ArrayList>(); + List> filters = new ArrayList>(); filters.add(new ArrayList()); filters.add(new ArrayList()); joinTree.setFilters(filters); joinTree.setFilterMap(new int[2][]); - ArrayList> filtersForPushing = - new ArrayList>(); + List> filtersForPushing = new ArrayList>(); filtersForPushing.add(new ArrayList()); filtersForPushing.add(new ArrayList()); joinTree.setFiltersForPushing(filtersForPushing); ASTNode joinCond = (ASTNode) joinParseTree.getChild(2); - ArrayList leftSrc = new ArrayList(); + List leftSrc = new ArrayList(); parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo); if (leftSrc.size() == 1) { joinTree.setLeftAlias(leftSrc.get(0)); @@ -10295,29 +10269,29 @@ private void mergeJoins(QB qb, QBJoinTree node, QBJoinTree target, int pos, int[ } target.setBaseSrc(baseSrc); - ArrayList> expr = target.getExpressions(); + List> expr = target.getExpressions(); for (int i = 0; i < nodeRightAliases.length; i++) { List nodeConds = node.getExpressions().get(i + 1); - ArrayList reordereNodeConds = new ArrayList(); + List reordereNodeConds = new ArrayList(); for(int k=0; k < tgtToNodeExprMap.length; k++) { reordereNodeConds.add(nodeConds.get(tgtToNodeExprMap[k])); } expr.add(reordereNodeConds); } - ArrayList nns = node.getNullSafes(); - ArrayList tns = target.getNullSafes(); + List nns = node.getNullSafes(); + List tns = target.getNullSafes(); for (int i = 0; i < tns.size(); i++) { tns.set(i, tns.get(i) & nns.get(i)); // any of condition contains non-NS, non-NS } - ArrayList> filters = target.getFilters(); + List> filters = target.getFilters(); for (int i = 0; i < nodeRightAliases.length; i++) { filters.add(node.getFilters().get(i + 1)); } if (node.getFilters().get(0).size() != 0) { - ArrayList filterPos = filters.get(pos); + List filterPos = filters.get(pos); filterPos.addAll(node.getFilters().get(0)); } @@ -10348,7 +10322,7 @@ private void mergeJoins(QB qb, QBJoinTree node, QBJoinTree target, int pos, int[ System.arraycopy(nmap, 1, newmap, tmap.length, nmap.length - 1); target.setFilterMap(newmap); - ArrayList> filter = 
target.getFiltersForPushing(); + List> filter = target.getFiltersForPushing(); for (int i = 0; i < nodeRightAliases.length; i++) { filter.add(node.getFiltersForPushing().get(i + 1)); } @@ -10439,8 +10413,8 @@ private void mergeJoins(QB qb, QBJoinTree node, QBJoinTree target, int pos, int[ return Pair.of(-1, null); } - ArrayList nodeCondn = node.getExpressions().get(0); - ArrayList targetCondn = null; + List nodeCondn = node.getExpressions().get(0); + List targetCondn = null; if (leftAlias == null || leftAlias.equals(target.getLeftAlias())) { targetCondn = target.getExpressions().get(0); @@ -10504,7 +10478,7 @@ boolean continueJoinMerge() { return true; } - boolean shouldMerge(final QBJoinTree node, final QBJoinTree target) { + private boolean shouldMerge(final QBJoinTree node, final QBJoinTree target) { boolean isNodeOuterJoin=false, isNodeSemiJoin=false, hasNodePostJoinFilters=false; boolean isTargetOuterJoin=false, isTargetSemiJoin=false, hasTargetPostJoinFilters=false; @@ -10599,7 +10573,7 @@ private void mergeJoinTree(QB qb) { curQBJTree = trees.get(i); if (curQBJTree != null) { if (prevQBJTree != null) { - ArrayList newCurLeftAliases = new ArrayList(); + List newCurLeftAliases = new ArrayList(); newCurLeftAliases.addAll(Arrays.asList(prevQBJTree.getLeftAliases())); newCurLeftAliases.addAll(Arrays.asList(prevQBJTree.getRightAliases())); curQBJTree @@ -10640,11 +10614,10 @@ private JoinType getType(JoinCond[] conds) { private Operator genSelectAllDesc(Operator input) throws SemanticException { OpParseContext inputCtx = opParseCtx.get(input); RowResolver inputRR = inputCtx.getRowResolver(); - ArrayList columns = inputRR.getColumnInfos(); - ArrayList colList = new ArrayList(); - ArrayList columnNames = new ArrayList(); - Map columnExprMap = - new HashMap(); + List columns = inputRR.getColumnInfos(); + List colList = new ArrayList(); + List columnNames = new ArrayList(); + Map columnExprMap = new HashMap(); for (int i = 0; i < columns.size(); i++) { ColumnInfo col = columns.get(i); colList.add(new ExprNodeColumnDesc(col, true)); @@ -10667,7 +10640,7 @@ private Operator genSelectAllDesc(Operator input) throws SemanticException { QBParseInfo qbp = qb.getParseInfo(); - TreeSet ks = new TreeSet(); + SortedSet ks = new TreeSet(); ks.addAll(qbp.getClauseNames()); List> commonGroupByDestGroups = new ArrayList>(); @@ -10767,7 +10740,7 @@ private Operator genSelectAllDesc(Operator input) throws SemanticException { return commonGroupByDestGroups; } - protected List determineSprayKeys(QBParseInfo qbp, String dest, + private List determineSprayKeys(QBParseInfo qbp, String dest, RowResolver inputRR) throws SemanticException { List sprayKeys = new ArrayList(); @@ -10833,7 +10806,7 @@ private Operator genBodyPlan(QB qb, Operator input, Map aliasT throws SemanticException { QBParseInfo qbp = qb.getParseInfo(); - TreeSet ks = new TreeSet(qbp.getClauseNames()); + SortedSet ks = new TreeSet(qbp.getClauseNames()); Map> inputs = createInputForDests(qb, input, ks); Operator curr = input; @@ -11081,8 +11054,8 @@ private Operator genUnionPlan(String unionalias, String leftalias, // This can be easily merged into 1 union RowResolver leftRR = opParseCtx.get(leftOp).getRowResolver(); RowResolver rightRR = opParseCtx.get(rightOp).getRowResolver(); - LinkedHashMap leftmap = leftRR.getFieldMap(leftalias); - LinkedHashMap rightmap = rightRR.getFieldMap(rightalias); + Map leftmap = leftRR.getFieldMap(leftalias); + Map rightmap = rightRR.getFieldMap(rightalias); // make sure the schemas of both sides are the same ASTNode 
tabref = qb.getAliases().isEmpty() ? null : qb.getParseInfo().getSrcForAlias(qb.getAliases().get(0)); @@ -11245,7 +11218,7 @@ private Operator genUnionPlan(String unionalias, String leftalias, String origInputAlias, RowResolver unionoutRR, String unionalias) throws SemanticException { - HashMap fieldMap = unionoutRR.getFieldMap(unionalias); + Map fieldMap = unionoutRR.getFieldMap(unionalias); Iterator oIter = origInputFieldMap.values().iterator(); Iterator uIter = fieldMap.values().iterator(); @@ -11336,7 +11309,7 @@ private ExprNodeDesc genSamplePredicate(TableSample ts, ExprNodeDesc intMaxExpr = new ExprNodeConstantDesc( TypeInfoFactory.intTypeInfo, Integer.valueOf(Integer.MAX_VALUE)); - ArrayList args = new ArrayList(); + List args = new ArrayList(); if (planExpr != null) { args.add(planExpr); } else if (useBucketCols) { @@ -11483,7 +11456,7 @@ private Operator genTablePlan(String alias, QB qb) throws SemanticException { tableScanOp.getConf().setTableSample(ts); int num = ts.getNumerator(); int den = ts.getDenominator(); - ArrayList sampleExprs = ts.getExprs(); + List sampleExprs = ts.getExprs(); // TODO: Do the type checking of the expressions List tabBucketCols = tab.getBucketCols(); @@ -11730,12 +11703,12 @@ private Operator genPlan(QB parent, QBExpr qbexpr) throws SemanticException { return null; } - public Operator genPlan(QB qb) throws SemanticException { + Operator genPlan(QB qb) throws SemanticException { return genPlan(qb, false); } @SuppressWarnings("nls") - public Operator genPlan(QB qb, boolean skipAmbiguityCheck) + private Operator genPlan(QB qb, boolean skipAmbiguityCheck) throws SemanticException { // First generate all the opInfos for the elements in the from clause @@ -11792,7 +11765,7 @@ public Operator genPlan(QB qb, boolean skipAmbiguityCheck) //After processing subqueries and source tables, process // partitioned table functions - HashMap ptfNodeToSpec = qb.getPTFNodeToSpec(); + Map ptfNodeToSpec = qb.getPTFNodeToSpec(); if ( ptfNodeToSpec != null ) { for(Entry entry : ptfNodeToSpec.entrySet()) { ASTNode ast = entry.getKey(); @@ -11941,15 +11914,14 @@ private Path createDummyFile() throws SemanticException { * @throws SemanticException */ - void genLateralViewPlans(Map aliasToOpInfo, QB qb) + private void genLateralViewPlans(Map aliasToOpInfo, QB qb) throws SemanticException { - Map> aliasToLateralViews = qb.getParseInfo() - .getAliasToLateralViews(); + Map> aliasToLateralViews = qb.getParseInfo().getAliasToLateralViews(); for (Entry e : aliasToOpInfo.entrySet()) { String alias = e.getKey(); // See if the alias has a lateral view. 
If so, chain the lateral view // operator on - ArrayList lateralViews = aliasToLateralViews.get(alias); + List lateralViews = aliasToLateralViews.get(alias); if (lateralViews != null) { Operator op = e.getValue(); @@ -12032,7 +12004,7 @@ private Operator genLateralViewPlan(QB qb, Operator op, ASTNode lateralViewTree) // names have to be changed to avoid conflicts RowResolver lateralViewRR = new RowResolver(); - ArrayList outputInternalColNames = new ArrayList(); + List outputInternalColNames = new ArrayList(); // For PPD, we need a column to expression map so that during the walk, @@ -12069,7 +12041,7 @@ private Operator genLateralViewPlan(QB qb, Operator op, ASTNode lateralViewTree) * the same order as in the dest row resolver */ private void LVmergeRowResolvers(RowResolver source, RowResolver dest, - Map colExprMap, ArrayList outputInternalColNames) { + Map colExprMap, List outputInternalColNames) { for (ColumnInfo c : source.getColumnInfos()) { String internalName = getColumnInternalName(outputInternalColNames.size()); outputInternalColNames.add(internalName); @@ -12084,7 +12056,7 @@ private void LVmergeRowResolvers(RowResolver source, RowResolver dest, } @SuppressWarnings("nls") - public Phase1Ctx initPhase1Ctx() { + Phase1Ctx initPhase1Ctx() { Phase1Ctx ctx_1 = new Phase1Ctx(); ctx_1.nextNum = 0; @@ -12182,7 +12154,7 @@ private static void walkASTAndQualifyNames(ASTNode ast, // Walk through the AST. // Replace all TOK_TABREF with fully qualified table name, if it is not already fully qualified. - protected String rewriteQueryWithQualifiedNames(ASTNode ast, TokenRewriteStream tokenRewriteStream) + private String rewriteQueryWithQualifiedNames(ASTNode ast, TokenRewriteStream tokenRewriteStream) throws SemanticException { UnparseTranslator unparseTranslator = new UnparseTranslator(conf); unparseTranslator.enable(); @@ -12356,7 +12328,7 @@ private static void extractColumnInfos(Table table, List colNames, List< // the table needs to be masked or filtered. // For the replacement, we leverage the methods that are used for // unparseTranslator. - protected ASTNode rewriteASTWithMaskAndFilter(TableMask tableMask, ASTNode ast, TokenRewriteStream tokenRewriteStream, + private ASTNode rewriteASTWithMaskAndFilter(TableMask tableMask, ASTNode ast, TokenRewriteStream tokenRewriteStream, Context ctx, Hive db, Map tabNameToTabObject, Set ignoredTokens) throws SemanticException { // 1. collect information about CTE if there is any. 
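(Reviewer note, illustrative only, not part of the patch.) The signature changes in the hunks above follow the "program to the interface" guideline: callers of these SemanticAnalyzer methods now depend on List/Map rather than ArrayList/HashMap, so the concrete collection can change without touching call sites. A minimal standalone sketch of the idea, assuming nothing about Hive internals; the class and method names below are invented for illustration:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

public class InterfaceTypedSignatures {

  // Declaring the parameter and return type as List (not ArrayList) lets callers
  // pass any List implementation, including immutable or linked ones.
  static List<String> qualify(List<String> names, String prefix) {
    List<String> result = new ArrayList<>(names.size());
    for (String name : names) {
      result.add(prefix + "." + name);
    }
    return result;
  }

  public static void main(String[] args) {
    // Each call compiles against the List-typed signature; only the first
    // would compile if the parameter were declared as a concrete ArrayList.
    System.out.println(qualify(new ArrayList<>(Arrays.asList("a", "b")), "db"));
    System.out.println(qualify(new LinkedList<>(Arrays.asList("c")), "db"));
    System.out.println(qualify(Collections.singletonList("d"), "db"));
  }
}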
@@ -12493,7 +12465,7 @@ else if(ast.getChild(0).getType() == HiveParser.TOK_FALSE) { return true; } - public void getHintsFromQB(QB qb, List hints) { + void getHintsFromQB(QB qb, List hints) { if (qb.getParseInfo().getHints() != null) { hints.add(qb.getParseInfo().getHints()); } @@ -12505,7 +12477,7 @@ public void getHintsFromQB(QB qb, List hints) { } } - public void getHintsFromQB(QBExpr qbExpr, List hints) { + private void getHintsFromQB(QBExpr qbExpr, List hints) { QBExpr qbExpr1 = qbExpr.getQBExpr1(); QBExpr qbExpr2 = qbExpr.getQBExpr2(); QB qb = qbExpr.getQB(); @@ -12731,7 +12703,7 @@ void analyzeInternal(ASTNode ast, PlannerContextFactory pcf) throws SemanticExce if (postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.PostExecutePrinter") || postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.LineageLogger") || postExecHooks.contains("org.apache.atlas.hive.hook.HiveHook")) { - ArrayList transformations = new ArrayList(); + List transformations = new ArrayList(); transformations.add(new HiveOpConverterPostProc()); transformations.add(new Generator(postExecHooks)); for (Transform t : transformations) { @@ -12832,7 +12804,7 @@ void analyzeInternal(ASTNode ast, PlannerContextFactory pcf) throws SemanticExce } } - private void putAccessedColumnsToReadEntity(HashSet inputs, ColumnAccessInfo columnAccessInfo) { + private void putAccessedColumnsToReadEntity(Set inputs, ColumnAccessInfo columnAccessInfo) { Map> tableToColumnAccessMap = columnAccessInfo.getTableToColumnAccessMap(); if (tableToColumnAccessMap != null && !tableToColumnAccessMap.isEmpty()) { for(ReadEntity entity: inputs) { @@ -13029,7 +13001,7 @@ protected void saveViewDefinition() throws SemanticException { return tablesUsed; } - static List convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException { + private static List convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException { List fieldSchema = convertRowSchemaToResultSetSchema(rr, false); ParseUtils.validateColumnNameUniqueness(fieldSchema); return fieldSchema; @@ -13068,7 +13040,7 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input) return genExprNodeDesc(expr, input, true, false); } - public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, + ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, RowResolver outerRR, Map subqueryToRelNode, boolean useCaching) throws SemanticException { @@ -13079,12 +13051,12 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, } - public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean useCaching) + ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean useCaching) throws SemanticException { return genExprNodeDesc(expr, input, useCaching, false); } - public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean useCaching, + private ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean useCaching, boolean foldExpr) throws SemanticException { TypeCheckCtx tcCtx = new TypeCheckCtx(input, useCaching, foldExpr); return genExprNodeDesc(expr, input, tcCtx); @@ -13094,7 +13066,7 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean use * Generates an expression node descriptors for the expression and children of it * with default TypeCheckCtx. 
*/ - public Map genAllExprNodeDesc(ASTNode expr, RowResolver input) + Map genAllExprNodeDesc(ASTNode expr, RowResolver input) throws SemanticException { TypeCheckCtx tcCtx = new TypeCheckCtx(input); return genAllExprNodeDesc(expr, input, tcCtx); @@ -13104,7 +13076,7 @@ public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, boolean use * Returns expression node descriptor for the expression. * If it's evaluated already in previous operator, it can be retrieved from cache. */ - public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, + ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input, TypeCheckCtx tcCtx) throws SemanticException { // We recursively create the exprNodeDesc. Base cases: when we encounter // a column ref, we convert that into an exprNodeColumnDesc; when we @@ -13158,7 +13130,7 @@ private ExprNodeDesc getExprNodeDescCached(ASTNode expr, RowResolver input) * @throws SemanticException Failed to evaluate expression */ @SuppressWarnings("nls") - public Map genAllExprNodeDesc(ASTNode expr, RowResolver input, + Map genAllExprNodeDesc(ASTNode expr, RowResolver input, TypeCheckCtx tcCtx) throws SemanticException { // Create the walker and the rules dispatcher. tcCtx.setUnparseTranslator(unparseTranslator); @@ -13467,7 +13439,7 @@ private void updateDefaultTblProps(Map source, Map partCols, final List defConstraints, + private boolean hasConstraints(final List partCols, final List defConstraints, final List notNullConstraints, final List checkConstraints) { for(FieldSchema partFS: partCols) { @@ -14195,7 +14167,7 @@ private void validateCreateView() } // Process the position alias in GROUPBY and ORDERBY - public void processPositionAlias(ASTNode ast) throws SemanticException { + void processPositionAlias(ASTNode ast) throws SemanticException { boolean isBothByPos = HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS); boolean isGbyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_POSITION_ALIAS); @@ -14779,10 +14751,10 @@ protected boolean isConstant() { } private static class AggregationExprCheck implements ContextVisitor { - HashMap destAggrExprs; + Map destAggrExprs; boolean isAggr = false; - public AggregationExprCheck(HashMap destAggrExprs) { + public AggregationExprCheck(Map destAggrExprs) { super(); this.destAggrExprs = destAggrExprs; } @@ -14857,8 +14829,8 @@ private PTFDesc translatePTFInvocationSpec(PTFInvocationSpec ptfQSpec, RowResolv return ptfDesc; } - Operator genPTFPlan(PTFInvocationSpec ptfQSpec, Operator input) throws SemanticException { - ArrayList componentQueries = PTFTranslator.componentize(ptfQSpec); + private Operator genPTFPlan(PTFInvocationSpec ptfQSpec, Operator input) throws SemanticException { + List componentQueries = PTFTranslator.componentize(ptfQSpec); for (PTFInvocationSpec ptfSpec : componentQueries) { input = genPTFPlanForComponentQuery(ptfSpec, input); } @@ -14876,10 +14848,10 @@ Operator genPTFPlan(PTFInvocationSpec ptfQSpec, Operator input) throws SemanticE * * @throws SemanticException */ - void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef, + private void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef, RowResolver inputRR, - ArrayList partCols, - ArrayList orderCols, + List partCols, + List orderCols, StringBuilder orderString, StringBuilder nullOrderString) throws SemanticException { @@ -14955,8 +14927,8 @@ private Operator genPTFPlanForComponentQuery(PTFInvocationSpec ptfQSpec, Operato * b. 
Build Reduce Sink Details (keyCols, valueCols, outColNames etc.) for this ptfDesc. */ - ArrayList partCols = new ArrayList(); - ArrayList orderCols = new ArrayList(); + List partCols = new ArrayList(); + List orderCols = new ArrayList(); StringBuilder orderString = new StringBuilder(); StringBuilder nullOrderString = new StringBuilder(); @@ -14999,7 +14971,7 @@ private Operator genPTFPlanForComponentQuery(PTFInvocationSpec ptfQSpec, Operato //--------------------------- Windowing handling: PTFInvocationSpec to PTFDesc -------------------- - Operator genWindowingPlan(QB qb, WindowingSpec wSpec, Operator input) throws SemanticException { + private Operator genWindowingPlan(QB qb, WindowingSpec wSpec, Operator input) throws SemanticException { wSpec.validateAndMakeEffective(); if (!isCBOExecuted() && !qb.getParseInfo().getDestToGroupBy().isEmpty()) { @@ -15038,8 +15010,8 @@ Operator genWindowingPlan(QB qb, WindowingSpec wSpec, Operator input) throws Sem private Operator genReduceSinkPlanForWindowing(WindowingSpec spec, RowResolver inputRR, Operator input) throws SemanticException{ - ArrayList partCols = new ArrayList(); - ArrayList orderCols = new ArrayList(); + List partCols = new ArrayList(); + List orderCols = new ArrayList(); StringBuilder order = new StringBuilder(); StringBuilder nullOrder = new StringBuilder(); @@ -15074,7 +15046,7 @@ private Operator genReduceSinkPlanForWindowing(WindowingSpec spec, -1, Operation.NOT_ACID); } - public static ArrayList parseSelect(String selectExprStr) + public static List parseSelect(String selectExprStr) throws SemanticException { ASTNode selNode = null; @@ -15085,7 +15057,7 @@ private Operator genReduceSinkPlanForWindowing(WindowingSpec spec, throw new SemanticException(pe); } - ArrayList selSpec = new ArrayList(); + List selSpec = new ArrayList(); int childCount = selNode.getChildCount(); for (int i = 0; i < childCount; i++) { ASTNode selExpr = (ASTNode) selNode.getChild(i); @@ -15197,24 +15169,24 @@ private boolean isAcidOutputFormat(Class of) { } } - protected boolean updating(String destination) { + private boolean updating(String destination) { return destination.startsWith(Context.DestClausePrefix.UPDATE.toString()); } - protected boolean deleting(String destination) { + private boolean deleting(String destination) { return destination.startsWith(Context.DestClausePrefix.DELETE.toString()); } // Make sure the proper transaction manager that supports ACID is being used - protected void checkAcidTxnManager(Table table) throws SemanticException { + private void checkAcidTxnManager(Table table) throws SemanticException { if (SessionState.get() != null && !getTxnMgr().supportsAcid() && !HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST_REPL)) { throw new SemanticException(ErrorMsg.TXNMGR_NOT_ACID, table.getDbName(), table.getTableName()); } } - public static ASTNode genSelectDIAST(RowResolver rr) { - LinkedHashMap> map = rr.getRslvMap(); + static ASTNode genSelectDIAST(RowResolver rr) { + Map> map = rr.getRslvMap(); ASTNode selectDI = new ASTNode(SELECTDI_TOKEN); // Note: this will determine the order of columns in the result. 
For now, the columns for each // table will be together; the order of the tables, as well as the columns within each diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java index 3734882e9b..d2b73d924e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse; -import java.util.ArrayList; +import java.util.List; /** * @@ -49,7 +49,7 @@ * In case the table does not have any clustering column, the usage of a table * sample clause without an ON part is disallowed by the compiler */ - private ArrayList exprs; + private List exprs; /** * Flag to indicate that input files can be pruned. @@ -67,7 +67,7 @@ * @param exprs * The list of expressions in the ON part of the TABLESAMPLE clause */ - public TableSample(String num, String den, ArrayList exprs) { + public TableSample(String num, String den, List exprs) { numerator = Integer.parseInt(num); denominator = Integer.parseInt(den); this.exprs = exprs; @@ -122,7 +122,7 @@ public void setDenominator(int den) { * * @return ArrayList<ASTNode> */ - public ArrayList getExprs() { + public List getExprs() { return exprs; } @@ -132,7 +132,7 @@ public void setDenominator(int den) { * @param exprs * The expression list */ - public void setExprs(ArrayList exprs) { + public void setExprs(List exprs) { this.exprs = exprs; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 0b55a39a8a..a5268ce8e8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -122,7 +122,7 @@ public void init(QueryState queryState, LogHelper console, Hive db) { @SuppressWarnings("nls") public void compile(final ParseContext pCtx, final List> rootTasks, - final HashSet inputs, final HashSet outputs) throws SemanticException { + final Set inputs, final Set outputs) throws SemanticException { Context ctx = pCtx.getContext(); GlobalLimitCtx globalLimitCtx = pCtx.getGlobalLimitCtx(); @@ -405,7 +405,7 @@ private String extractTableFullName(StatsTask tsk) throws SemanticException { return tsk.getWork().getFullTableName(); } - private Task genTableStats(ParseContext parseContext, TableScanOperator tableScan, Task currentTask, final HashSet outputs) + private Task genTableStats(ParseContext parseContext, TableScanOperator tableScan, Task currentTask, final Set outputs) throws HiveException { Class inputFormat = tableScan.getConf().getTableMetadata() .getInputFormatClass(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java index 5000ba47b3..916a172920 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java @@ -896,11 +896,9 @@ private static void removeSemijoinOptimizationFromSMBJoins( private static class TerminalOpsInfo { public Set> terminalOps; - public Set rsOps; - TerminalOpsInfo(Set> terminalOps, Set rsOps) { + TerminalOpsInfo(Set> terminalOps) { this.terminalOps = terminalOps; - this.rsOps = rsOps; } } @@ -928,7 +926,7 @@ private void connectTerminalOps(ParseContext pCtx) { OperatorUtils.findWorkOperatorsAndSemiJoinEdges(selOp, pCtx.getRsToSemiJoinBranchInfo(), workRSOps, workTerminalOps); - TerminalOpsInfo candidate = new 
TerminalOpsInfo(workTerminalOps, workRSOps); + TerminalOpsInfo candidate = new TerminalOpsInfo(workTerminalOps); // A work may contain multiple semijoin edges, traverse rsOps and add for each for (ReduceSinkOperator rsFound : workRSOps) { @@ -1094,7 +1092,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // -SEL-GB1-RS1-GB2-RS2 GroupByOperator gbOp = (GroupByOperator) stack.get(stack.size() - 2); GroupByDesc gbDesc = gbOp.getConf(); - ArrayList aggregationDescs = gbDesc.getAggregators(); + List aggregationDescs = gbDesc.getAggregators(); for (AggregationDesc agg : aggregationDescs) { if (!isBloomFilterAgg(agg)) { continue; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index a4c1b9ab38..86bb537764 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -1617,7 +1617,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, assert child.getType() == HiveParser.TOK_TABNAME; assert child.getChildCount() == 1; String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText()); - HashMap columns = input.getFieldMap(tableAlias); + Map columns = input.getFieldMap(tableAlias); if (columns == null) { throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(child)); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java index 6713163ace..bc6b6b111c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.authorization; import java.io.Serializable; -import java.util.HashSet; +import java.util.Set; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate; @@ -37,40 +37,40 @@ @LimitedPrivate(value = { "Apache Hive, Apache Sentry (incubating)" }) @Evolving public interface HiveAuthorizationTaskFactory { - public Task createCreateRoleTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createCreateRoleTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - public Task createDropRoleTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createDropRoleTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - public Task createShowRoleGrantTask(ASTNode node, Path resultFile, - HashSet inputs, HashSet outputs) throws SemanticException; + Task createShowRoleGrantTask(ASTNode node, Path resultFile, + Set inputs, Set outputs) throws SemanticException; - public Task createGrantRoleTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createGrantRoleTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - public Task createRevokeRoleTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createRevokeRoleTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - public Task createGrantTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createGrantTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - 
public Task createShowGrantTask(ASTNode node, Path resultFile, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createShowGrantTask(ASTNode node, Path resultFile, Set inputs, + Set outputs) throws SemanticException; - public Task createRevokeTask(ASTNode node, HashSet inputs, - HashSet outputs) throws SemanticException; + Task createRevokeTask(ASTNode node, Set inputs, + Set outputs) throws SemanticException; - public Task createSetRoleTask(String roleName, - HashSet inputs, HashSet outputs) throws SemanticException; + Task createSetRoleTask(String roleName, + Set inputs, Set outputs) throws SemanticException; - public Task createShowCurrentRoleTask(HashSet inputs, - HashSet outputs, Path resFile) throws SemanticException; + Task createShowCurrentRoleTask(Set inputs, + Set outputs, Path resFile) throws SemanticException; - public Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, - HashSet inputs, HashSet outputs) throws SemanticException; + Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, + Set inputs, Set outputs) throws SemanticException; - public Task createShowRolesTask(ASTNode ast, Path resFile, - HashSet inputs, HashSet outputs) throws SemanticException; + Task createShowRolesTask(ASTNode ast, Path resFile, + Set inputs, Set outputs) throws SemanticException; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 756a33afc8..b122620bdb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -19,9 +19,9 @@ import java.io.Serializable; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -73,22 +73,22 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } @Override - public Task createCreateRoleTask(ASTNode ast, HashSet inputs, - HashSet outputs) { + public Task createCreateRoleTask(ASTNode ast, Set inputs, + Set outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); CreateRoleDesc createRoleDesc = new CreateRoleDesc(roleName); return TaskFactory.get(new DDLWork(inputs, outputs, createRoleDesc)); } @Override - public Task createDropRoleTask(ASTNode ast, HashSet inputs, - HashSet outputs) { + public Task createDropRoleTask(ASTNode ast, Set inputs, + Set outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); DropRoleDesc dropRoleDesc = new DropRoleDesc(roleName); return TaskFactory.get(new DDLWork(inputs, outputs, dropRoleDesc)); } @Override public Task createShowRoleGrantTask(ASTNode ast, Path resultFile, - HashSet inputs, HashSet outputs) { + Set inputs, Set outputs) { ASTNode child = (ASTNode) ast.getChild(0); PrincipalType principalType = PrincipalType.USER; switch (child.getType()) { @@ -107,8 +107,8 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { return TaskFactory.get(new DDLWork(inputs, outputs, showRoleGrantDesc)); } @Override - public Task createGrantTask(ASTNode ast, HashSet inputs, - HashSet outputs) throws SemanticException { + public Task createGrantTask(ASTNode ast, Set inputs, + Set outputs) throws SemanticException { List privilegeDesc = 
analyzePrivilegeListDef( (ASTNode) ast.getChild(0)); List principalDesc = AuthorizationParseUtils.analyzePrincipalListDef( @@ -135,8 +135,8 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } @Override - public Task createRevokeTask(ASTNode ast, HashSet inputs, - HashSet outputs) throws SemanticException { + public Task createRevokeTask(ASTNode ast, Set inputs, + Set outputs) throws SemanticException { List privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0)); List principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1)); PrivilegeObjectDesc hiveObj = null; @@ -153,8 +153,8 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc)); } @Override - public Task createShowGrantTask(ASTNode ast, Path resultFile, HashSet inputs, - HashSet outputs) throws SemanticException { + public Task createShowGrantTask(ASTNode ast, Path resultFile, Set inputs, + Set outputs) throws SemanticException { PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; @@ -180,17 +180,17 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { return TaskFactory.get(new DDLWork(inputs, outputs, showGrant)); } @Override - public Task createGrantRoleTask(ASTNode ast, HashSet inputs, - HashSet outputs) { + public Task createGrantRoleTask(ASTNode ast, Set inputs, + Set outputs) { return analyzeGrantRevokeRole(true, ast, inputs, outputs); } @Override - public Task createRevokeRoleTask(ASTNode ast, HashSet inputs, - HashSet outputs) { + public Task createRevokeRoleTask(ASTNode ast, Set inputs, + Set outputs) { return analyzeGrantRevokeRole(false, ast, inputs, outputs); } private Task analyzeGrantRevokeRole(boolean isGrant, ASTNode ast, - HashSet inputs, HashSet outputs) { + Set inputs, Set outputs) { List principalDesc = AuthorizationParseUtils.analyzePrincipalListDef( (ASTNode) ast.getChild(0)); @@ -223,7 +223,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast, - HashSet outputs) + Set outputs) throws SemanticException { PrivilegeObjectDesc subject = parsePrivObject(ast); @@ -335,24 +335,22 @@ private String toMessage(ErrorMsg message, Object detail) { } @Override - public Task createSetRoleTask(String roleName, - HashSet inputs, HashSet outputs) - throws SemanticException { + public Task createSetRoleTask(String roleName, Set inputs, + Set outputs) throws SemanticException { SetRoleDesc setRoleDesc = new SetRoleDesc(roleName); return TaskFactory.get(new DDLWork(inputs, outputs, setRoleDesc)); } @Override - public Task createShowCurrentRoleTask( - HashSet inputs, HashSet outputs, Path resFile) - throws SemanticException { + public Task createShowCurrentRoleTask( Set inputs, Set outputs, + Path resFile) throws SemanticException { ShowCurrentRoleDesc showCurrentRoleDesc = new ShowCurrentRoleDesc(resFile.toString()); return TaskFactory.get(new DDLWork(inputs, outputs, showCurrentRoleDesc)); } @Override - public Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, - HashSet inputs, HashSet outputs) throws SemanticException { + public Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, Set inputs, + Set outputs) throws SemanticException { String roleName; if (ast.getChildCount() == 1) { @@ -367,8 +365,8 @@ private String toMessage(ErrorMsg message, Object detail) { } @Override - public Task createShowRolesTask(ASTNode ast, Path resFile, - HashSet inputs, 
HashSet outputs) throws SemanticException { + public Task createShowRolesTask(ASTNode ast, Path resFile, Set inputs, + Set outputs) throws SemanticException { ShowRolesDesc showRolesDesc = new ShowRolesDesc(resFile.toString()); return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc)); } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java index 87a7f4d224..6723de65b5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; +import java.util.List; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; @@ -33,7 +34,7 @@ private static final long serialVersionUID = 1L; private String genericUDAFName; - private java.util.ArrayList parameters; + private List parameters; private boolean distinct; private GenericUDAFEvaluator.Mode mode; @@ -49,7 +50,7 @@ public AggregationDesc() { public AggregationDesc(final String genericUDAFName, final GenericUDAFEvaluator genericUDAFEvaluator, - final java.util.ArrayList parameters, + final List parameters, final boolean distinct, final GenericUDAFEvaluator.Mode mode) { this.genericUDAFName = genericUDAFName; this.parameters = parameters; @@ -108,11 +109,11 @@ public void setGenericUDAFWritableEvaluator(GenericUDAFEvaluator genericUDAFWrit this.genericUDAFWritableEvaluator = genericUDAFWritableEvaluator; } - public java.util.ArrayList getParameters() { + public List getParameters() { return parameters; } - public void setParameters(final java.util.ArrayList parameters) { + public void setParameters(List parameters) { this.parameters = parameters; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java index 8a60d59d16..ffc3b791bf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java @@ -20,8 +20,8 @@ import java.io.Serializable; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; +import java.util.Set; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.Task; @@ -41,20 +41,20 @@ private static final long serialVersionUID = 1L; private Path resFile; - private ArrayList> rootTasks; + private List> rootTasks; private Task fetchTask; private ASTNode astTree; private String astStringTree; - private HashSet inputs; - private HashSet outputs; + private Set inputs; + private Set outputs; private ParseContext pCtx; private ExplainConfiguration config; - boolean appendTaskType; + private boolean appendTaskType; - String cboInfo; - String cboPlan; + private String cboInfo; + private String cboPlan; private String optimizedSQL; @@ -101,11 +101,11 @@ public void setResFile(Path resFile) { this.resFile = resFile; } - public ArrayList> getRootTasks() { + public List> getRootTasks() { return rootTasks; } - public void setRootTasks(ArrayList> rootTasks) { + public void setRootTasks(List> rootTasks) { this.rootTasks = rootTasks; } @@ -117,19 +117,19 @@ public void setFetchTask(Task fetchTask) { this.fetchTask = fetchTask; } - public HashSet getInputs() { + public Set getInputs() { return inputs; } - public void setInputs(HashSet inputs) { + public void setInputs(Set inputs) { this.inputs = inputs; } - public HashSet getOutputs() { + public Set 
getOutputs() { return outputs; } - public void setOutputs(HashSet outputs) { + public void setOutputs(Set outputs) { this.outputs = outputs; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java index 687f66771a..09b0d7a0e7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java @@ -65,12 +65,12 @@ // no hash aggregations for group by private boolean bucketGroup; - private ArrayList keys; + private List keys; private List listGroupingSets; private boolean groupingSetsPresent; private int groupingSetPosition = -1; // /* in case of grouping sets; groupby1 will output values for every setgroup; this is the index of the column that information will be sent */ - private ArrayList aggregators; - private ArrayList outputColumnNames; + private List aggregators; + private List outputColumnNames; private float groupByMemoryUsage; private float memoryThreshold; private float minReductionHashAggr; @@ -82,9 +82,9 @@ public GroupByDesc() { public GroupByDesc( final Mode mode, - final ArrayList outputColumnNames, - final ArrayList keys, - final ArrayList aggregators, + final List outputColumnNames, + final List keys, + final List aggregators, final float groupByMemoryUsage, final float memoryThreshold, final float minReductionHashAggr, @@ -99,9 +99,9 @@ public GroupByDesc( public GroupByDesc( final Mode mode, - final ArrayList outputColumnNames, - final ArrayList keys, - final ArrayList aggregators, + final List outputColumnNames, + final List keys, + final List aggregators, final boolean bucketGroup, final float groupByMemoryUsage, final float memoryThreshold, @@ -166,22 +166,22 @@ public String getUserLevelExplainKeyString() { return PlanUtils.getExprListString(keys, true); } - public ArrayList getKeys() { + public List getKeys() { return keys; } - public void setKeys(final ArrayList keys) { + public void setKeys(final List keys) { this.keys = keys; } @Explain(displayName = "outputColumnNames") @Signature - public ArrayList getOutputColumnNames() { + public List getOutputColumnNames() { return outputColumnNames; } @Explain(displayName = "Output", explainLevels = { Level.USER }) - public ArrayList getUserLevelExplainOutputColumnNames() { + public List getUserLevelExplainOutputColumnNames() { return outputColumnNames; } @@ -192,8 +192,7 @@ public boolean pruneGroupingSetId() { outputColumnNames.size() != keys.size() + aggregators.size(); } - public void setOutputColumnNames( - ArrayList outputColumnNames) { + public void setOutputColumnNames(List outputColumnNames) { this.outputColumnNames = outputColumnNames; } @@ -236,12 +235,11 @@ public String getMinReductionHashAggrString() { return res; } - public ArrayList getAggregators() { + public List getAggregators() { return aggregators; } - public void setAggregators( - final ArrayList aggregators) { + public void setAggregators(List aggregators) { this.aggregators = aggregators; } @@ -267,7 +265,7 @@ public void setBucketGroup(boolean bucketGroup) { * columns behave like they were distinct - for example min and max operators. 
*/ public boolean isDistinctLike() { - ArrayList aggregators = getAggregators(); + List aggregators = getAggregators(); for (AggregationDesc ad : aggregators) { if (!ad.getDistinct()) { GenericUDAFEvaluator udafEval = ad.getGenericUDAFEvaluator(); @@ -328,11 +326,11 @@ public void setDistinct(boolean isDistinct) { @Override public Object clone() { - ArrayList outputColumnNames = new ArrayList<>(); + List outputColumnNames = new ArrayList<>(); outputColumnNames.addAll(this.outputColumnNames); - ArrayList keys = new ArrayList<>(); + List keys = new ArrayList<>(); keys.addAll(this.keys); - ArrayList aggregators = new ArrayList<>(); + List aggregators = new ArrayList<>(); aggregators.addAll(this.aggregators); List listGroupingSets = new ArrayList<>(); listGroupingSets.addAll(this.listGroupingSets); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 6e2754ae1f..b821cb993f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -22,6 +22,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import com.google.common.collect.ImmutableSet; import org.apache.hadoop.hive.conf.Constants; @@ -326,7 +327,7 @@ public String getDatabaseName() { return dbName; } - public Task getCreateTableTask(HashSet inputs, HashSet outputs, + public Task getCreateTableTask(Set inputs, Set outputs, HiveConf conf) { switch (getDescType()) { case TABLE: diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java index 97aa8ce864..d45c695402 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.plan; import java.util.ArrayList; +import java.util.List; import java.util.Objects; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -34,12 +35,12 @@ private static final long serialVersionUID = 1L; private int numSelColumns; - private ArrayList outputInternalColNames; + private List outputInternalColNames; public LateralViewJoinDesc() { } - public LateralViewJoinDesc(int numSelColumns, ArrayList outputInternalColNames) { + public LateralViewJoinDesc(int numSelColumns, List outputInternalColNames) { this.numSelColumns = numSelColumns; this.outputInternalColNames = outputInternalColNames; } @@ -49,12 +50,12 @@ public void setOutputInternalColNames(ArrayList outputInternalColNames) } @Explain(displayName = "outputColumnNames") - public ArrayList getOutputInternalColNames() { + public List getOutputInternalColNames() { return outputInternalColNames; } @Explain(displayName = "Output", explainLevels = { Level.USER }) - public ArrayList getUserLevelExplainOutputInternalColNames() { + public List getUserLevelExplainOutputInternalColNames() { return outputInternalColNames; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java index 41a26560c3..ef7e956fc7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java @@ -399,7 +399,7 @@ public boolean getCacheAffinity() { return llapIoDesc.cached; } - public void setNameToSplitSample(HashMap nameToSplitSample) { + public void setNameToSplitSample(Map nameToSplitSample) { this.nameToSplitSample = 
nameToSplitSample; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java index 8ca8e46919..d7253a4955 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.plan; import java.io.Serializable; -import java.util.HashSet; import java.util.List; +import java.util.Set; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -45,30 +45,29 @@ /** * ReadEntitites that are passed to the hooks. */ - protected HashSet inputs; + protected Set inputs; /** * List of WriteEntities that are passed to the hooks. */ - protected HashSet outputs; + protected Set outputs; /** * List of inserted partitions */ protected List movedParts; - private boolean isNoop; private boolean isInReplicationScope = false; public MoveWork() { } - private MoveWork(HashSet inputs, HashSet outputs) { + private MoveWork(Set inputs, Set outputs) { this.inputs = inputs; this.outputs = outputs; this.needCleanTarget = true; } - public MoveWork(HashSet inputs, HashSet outputs, + public MoveWork(Set inputs, Set outputs, final LoadTableDesc loadTableWork, final LoadFileDesc loadFileWork, boolean checkFileFormat, boolean srcLocal) { this(inputs, outputs); @@ -82,7 +81,7 @@ public MoveWork(HashSet inputs, HashSet outputs, this.srcLocal = srcLocal; } - public MoveWork(HashSet inputs, HashSet outputs, + public MoveWork(Set inputs, Set outputs, final LoadTableDesc loadTableWork, final LoadFileDesc loadFileWork, boolean checkFileFormat) { this(inputs, outputs, loadTableWork, loadFileWork, checkFileFormat, false); @@ -134,19 +133,19 @@ public void setCheckFileFormat(boolean checkFileFormat) { this.checkFileFormat = checkFileFormat; } - public HashSet getInputs() { + public Set getInputs() { return inputs; } - public HashSet getOutputs() { + public Set getOutputs() { return outputs; } - public void setInputs(HashSet inputs) { + public void setInputs(Set inputs) { this.inputs = inputs; } - public void setOutputs(HashSet outputs) { + public void setOutputs(Set outputs) { this.outputs = outputs; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index 217a7633bc..d9f88fe731 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -665,7 +665,7 @@ public static TableDesc getReduceValueTableDesc(List fieldSchemas) */ public static List getFieldSchemasFromRowSchema(RowSchema row, String fieldPrefix) { - ArrayList c = row.getSignature(); + List c = row.getSignature(); return getFieldSchemasFromColumnInfo(c, fieldPrefix); } @@ -673,7 +673,7 @@ public static TableDesc getReduceValueTableDesc(List fieldSchemas) * Convert the ColumnInfo to FieldSchema. 
*/ public static List getFieldSchemasFromColumnInfo( - ArrayList cols, String fieldPrefix) { + List cols, String fieldPrefix) { if ((cols == null) || (cols.size() == 0)) { return new ArrayList(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java index 1fbfa4f814..6b37a59ab1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java @@ -705,7 +705,7 @@ else if ( isPlus ) TypeCheckCtx selectListInputTypeCheckCtx; StructObjectInspector selectListInputOI; - ArrayList selectSpec; + List selectSpec; ResultExprInfo resultExprInfo; diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java index 594688bad3..98121e98d4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hive.ql.parse; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import org.junit.Assert; @@ -159,7 +159,7 @@ public void testJoinView1AndTable2() throws ParseException { Assert.assertNotNull(cols.contains("name1")); } - private Map> getColsFromReadEntity(HashSet inputs) { + private Map> getColsFromReadEntity(Set inputs) { Map> tableColsMap = new HashMap>(); for(ReadEntity entity: inputs) { switch (entity.getType()) { diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java index cd246dec90..322fbbd7f3 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java @@ -108,22 +108,21 @@ QBJoinTree createJoinTree(JoinType type, children[0] = leftAlias; children[1] = rightAlias; jT.setBaseSrc(children); - ArrayList> expressions = new ArrayList>(); + List> expressions = new ArrayList>(); expressions.add(new ArrayList()); expressions.add(new ArrayList()); jT.setExpressions(expressions); - ArrayList nullsafes = new ArrayList(); + List nullsafes = new ArrayList(); jT.setNullSafes(nullsafes); - ArrayList> filters = new ArrayList>(); + List> filters = new ArrayList>(); filters.add(new ArrayList()); filters.add(new ArrayList()); jT.setFilters(filters); jT.setFilterMap(new int[2][]); - ArrayList> filtersForPushing = - new ArrayList>(); + List> filtersForPushing = new ArrayList>(); filtersForPushing.add(new ArrayList()); filtersForPushing.add(new ArrayList()); jT.setFiltersForPushing(filtersForPushing);
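(Reviewer note, illustrative only, not part of the patch.) Widening the HashSet parameters to Set, as done for the inputs/outputs collections throughout this patch, also lets callers and tests supply read-only or order-preserving sets directly instead of copying them into a HashSet first. A self-contained sketch under that assumption; the class and method names below are invented and do not appear in Hive:

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;

public class SetTypedParameters {

  // Accepting Set rather than HashSet keeps the method usable with any Set
  // implementation and signals that it does not depend on hash ordering.
  static int countEntities(Set<String> inputs, Set<String> outputs) {
    return inputs.size() + outputs.size();
  }

  public static void main(String[] args) {
    Set<String> inputs = new LinkedHashSet<>();
    inputs.add("db.src_table");

    // An unmodifiable view could not be passed if the parameter type were HashSet.
    Set<String> outputs = Collections.unmodifiableSet(Collections.singleton("db.dst_table"));

    System.out.println(countEntities(inputs, outputs)); // prints 2
  }
}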