Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (working copy)
@@ -752,7 +752,6 @@
 
   protected void checkAndGenObject() throws HiveException {
     if (condn[0].getType() == JoinDesc.UNIQUE_JOIN) {
-      new IntermediateObject(new ArrayList[numAliases], 0);
 
       // Check if results need to be emitted.
       // Results only need to be emitted if there is a non-null entry in a table
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (working copy)
@@ -52,10 +52,10 @@
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SubStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -726,7 +726,7 @@
    */
   @Override
   public String getName() {
-    return new String("FS");
+    return "FS";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (working copy)
@@ -104,7 +104,7 @@
    */
   @Override
   public String getName() {
-    return new String("FIL");
+    return "FIL";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (working copy)
@@ -451,11 +451,11 @@
    **/
   private int getSize(int pos, Class c, Field f) {
     if (c.isPrimitive()
-        || c.isInstance(new Boolean(true))
-        || c.isInstance(new Byte((byte) 0))
-        || c.isInstance(new Short((short) 0))
-        || c.isInstance(new Integer(0))
-        || c.isInstance(new Long(0))
+        || c.isInstance(Boolean.valueOf(true))
+        || c.isInstance(Byte.valueOf((byte) 0))
+        || c.isInstance(Short.valueOf((short) 0))
+        || c.isInstance(Integer.valueOf(0))
+        || c.isInstance(Long.valueOf(0))
         || c.isInstance(new Float(0))
         || c.isInstance(new Double(0))) {
       return javaSizePrimitiveType;
@@ -1051,7 +1051,7 @@
    */
   @Override
   public String getName() {
-    return new String("GBY");
+    return "GBY";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy)
@@ -814,7 +814,7 @@
    * @return the name of the operator
    */
   public String getName() {
-    return new String("OP");
+    return "OP";
   }
 
   /**
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (working copy)
@@ -310,7 +310,7 @@
    */
   @Override
   public String getName() {
-    return new String("RS");
+    return "RS";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (working copy)
@@ -89,7 +89,7 @@
    */
   @Override
   public String getName() {
-    return new String("SEL");
+    return "SEL";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (working copy)
@@ -36,9 +36,9 @@
 import org.apache.hadoop.hive.ql.stats.StatsSetupConst;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.JobConf;
@@ -208,7 +208,7 @@
    **/
   @Override
   public String getName() {
-    return new String("TS");
+    return "TS";
   }
 
   // this 'neededColumnIDs' field is included in this operator class instead of
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java (working copy)
@@ -85,7 +85,7 @@
   private static ThreadLocal<Integer> tid = new ThreadLocal<Integer>() {
     @Override
     protected synchronized Integer initialValue() {
-      return new Integer(0);
+      return Integer.valueOf(0);
     }
   };
 
@@ -96,7 +96,7 @@
   }
 
   public static void resetId() {
-    tid.set(new Integer(0));
+    tid.set(Integer.valueOf(0));
   }
 
   @SuppressWarnings("unchecked")
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java (working copy)
@@ -137,7 +137,7 @@
    */
   @Override
   public String getName() {
-    return new String("UNION");
+    return "UNION";
   }
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (working copy)
@@ -71,7 +71,7 @@
    * @see org.apache.hadoop.hive.ql.lib.Node#getName()
    */
   public String getName() {
-    return (new Integer(super.getToken().getType())).toString();
+    return (Integer.valueOf(super.getToken().getType())).toString();
   }
 
   /**
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -295,7 +295,7 @@
       if (val > 127) {
         val = val - 256;
       }
-      bArray[j++] = new Integer(val).byteValue();
+      bArray[j++] = (byte)val;
     }
 
     String res = new String(bArray, charSetName);
@@ -555,17 +555,18 @@
     if (children <= 0) {
       throw new SemanticException("empty struct not allowed.");
     }
 
+    StringBuilder buffer = new StringBuilder(typeStr);
     for (int i = 0; i < children; i++) {
       ASTNode child = (ASTNode) typeNode.getChild(i);
-      typeStr += unescapeIdentifier(child.getChild(0).getText()) + ":";
-      typeStr += getTypeStringFromAST((ASTNode) child.getChild(1));
+      buffer.append(unescapeIdentifier(child.getChild(0).getText())).append(":");
+      buffer.append(getTypeStringFromAST((ASTNode) child.getChild(1)));
       if (i < children - 1) {
-        typeStr += ",";
+        buffer.append(",");
       }
     }
-    typeStr += ">";
-    return typeStr;
+    buffer.append(">");
+    return buffer.toString();
   }
 
   private static String getUnionTypeStringFromAST(ASTNode typeNode)
@@ -576,13 +577,15 @@
     if (children <= 0) {
       throw new SemanticException("empty union not allowed.");
     }
 
+    StringBuilder buffer = new StringBuilder(typeStr);
     for (int i = 0; i < children; i++) {
-      typeStr += getTypeStringFromAST((ASTNode) typeNode.getChild(i));
+      buffer.append(getTypeStringFromAST((ASTNode) typeNode.getChild(i)));
       if (i < children - 1) {
-        typeStr += ",";
+        buffer.append(",");
       }
     }
-    typeStr += ">";
+    buffer.append(">");
+    typeStr = buffer.toString();
     return typeStr;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (working copy)
@@ -99,7 +99,7 @@
   private HashSet<ReadEntity> semanticInputs;
   private List<Task<? extends Serializable>> rootTasks;
 
-  
+
   public ParseContext() {
   }
 
@@ -176,7 +176,6 @@
     this.uCtx = uCtx;
     this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
     hasNonPartCols = false;
-    this.groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
     this.groupOpToInputTables = groupOpToInputTables;
     this.prunedPartitions = prunedPartitions;
     this.opToSamplePruner = opToSamplePruner;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1148179)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -27,9 +27,9 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.Map.Entry;
 
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
@@ -92,7 +92,6 @@
 import org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMROperator;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext;
-import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
@@ -102,6 +101,7 @@
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinFactory;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
+import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
@@ -122,7 +122,6 @@
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
-import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.plan.ForwardDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
@@ -145,12 +144,13 @@
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.plan.UDTFDesc;
 import org.apache.hadoop.hive.ql.plan.UnionDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -158,9 +158,9 @@
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -3841,8 +3841,8 @@
             + dest_path, e);
       }
     }
-    String cols = new String();
-    String colTypes = new String();
+    String cols = "";
+    String colTypes = "";
     ArrayList<ColumnInfo> colInfos = inputRR.getColumnInfos();
 
     // CTAS case: the file output format and serde are defined by the create
@@ -4260,7 +4260,6 @@
   private ArrayList<ExprNodeDesc> getParitionColsFromBucketCols(String dest, QB qb,
       Table tab, TableDesc table_desc, Operator input, boolean convert)
      throws SemanticException {
-    RowResolver inputRR = opParseCtx.get(input).getRowResolver();
     List<String> tabBucketCols = tab.getBucketCols();
     List<FieldSchema> tabCols = tab.getCols();
 
@@ -6861,26 +6860,26 @@
     // the operator stack.
     // The dispatcher generates the plan from the operator tree
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
-    opRules.put(new RuleRegExp(new String("R1"), "TS%"), new GenMRTableScan1());
-    opRules.put(new RuleRegExp(new String("R2"), "TS%.*RS%"),
+    opRules.put(new RuleRegExp("R1", "TS%"), new GenMRTableScan1());
+    opRules.put(new RuleRegExp("R2", "TS%.*RS%"),
         new GenMRRedSink1());
-    opRules.put(new RuleRegExp(new String("R3"), "RS%.*RS%"),
+    opRules.put(new RuleRegExp("R3", "RS%.*RS%"),
        new GenMRRedSink2());
-    opRules.put(new RuleRegExp(new String("R4"), "FS%"), new GenMRFileSink1());
-    opRules.put(new RuleRegExp(new String("R5"), "UNION%"), new GenMRUnion1());
-    opRules.put(new RuleRegExp(new String("R6"), "UNION%.*RS%"),
+    opRules.put(new RuleRegExp("R4", "FS%"), new GenMRFileSink1());
+    opRules.put(new RuleRegExp("R5", "UNION%"), new GenMRUnion1());
+    opRules.put(new RuleRegExp("R6", "UNION%.*RS%"),
        new GenMRRedSink3());
-    opRules.put(new RuleRegExp(new String("R6"), "MAPJOIN%.*RS%"),
+    opRules.put(new RuleRegExp("R6", "MAPJOIN%.*RS%"),
        new GenMRRedSink4());
-    opRules.put(new RuleRegExp(new String("R7"), "TS%.*MAPJOIN%"),
+    opRules.put(new RuleRegExp("R7", "TS%.*MAPJOIN%"),
        MapJoinFactory.getTableScanMapJoin());
-    opRules.put(new RuleRegExp(new String("R8"), "RS%.*MAPJOIN%"),
+    opRules.put(new RuleRegExp("R8", "RS%.*MAPJOIN%"),
        MapJoinFactory.getReduceSinkMapJoin());
-    opRules.put(new RuleRegExp(new String("R9"), "UNION%.*MAPJOIN%"),
+    opRules.put(new RuleRegExp("R9", "UNION%.*MAPJOIN%"),
        MapJoinFactory.getUnionMapJoin());
-    opRules.put(new RuleRegExp(new String("R10"), "MAPJOIN%.*MAPJOIN%"),
+    opRules.put(new RuleRegExp("R10", "MAPJOIN%.*MAPJOIN%"),
        MapJoinFactory.getMapJoinMapJoin());
-    opRules.put(new RuleRegExp(new String("R11"), "MAPJOIN%SEL%"),
+    opRules.put(new RuleRegExp("R11", "MAPJOIN%SEL%"),
        MapJoinFactory.getMapJoin());
 
     // The dispatcher fires the processor corresponding to the closest matching
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java (revision 1148179)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeField.java (working copy)
@@ -28,8 +28,8 @@
   // [this.fieldid :] Requiredness() FieldType() this.name FieldValue()
   // [CommaOrSemicolon()]
 
-  private final int FD_REQUIREDNESS = 0;
-  private final int FD_FIELD_TYPE = 1;
+  private static final int FD_REQUIREDNESS = 0;
+  private static final int FD_FIELD_TYPE = 1;
 
   public boolean isSkippable() {
     return ((DynamicSerDeFieldRequiredness) jjtGetChild(FD_REQUIREDNESS))
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java (revision 1148179)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldType.java (working copy)
@@ -27,7 +27,7 @@
 
   // production: this.name | BaseType() | MapType() | SetType() | ListType()
 
-  private final int FD_FIELD_TYPE = 0;
+  private static final int FD_FIELD_TYPE = 0;
 
   public DynamicSerDeFieldType(int i) {
     super(i);
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunction.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunction.java (revision 1148179)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFunction.java (working copy)
@@ -29,7 +29,7 @@
   // production is: Async() FunctionType() NAME FieldList() Throws()
   // [CommaOrSemicolon]
 
-  private final int FD_FIELD_LIST = 2;
+  private static final int FD_FIELD_LIST = 2;
 
   public DynamicSerDeFunction(int i) {
     super(i);
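
Note on the recurring getName() change (FileSinkOperator, FilterOperator, GroupByOperator, Operator, ReduceSinkOperator, SelectOperator, TableScanOperator, UnionOperator): a string literal such as "FS" is interned, so every caller shares a single immutable instance, while new String("FS") allocated a fresh heap copy of that literal on each call for no benefit. A minimal standalone sketch of the difference; this class is made up for illustration and is not part of the patch:

    public class InternDemo {
        public static void main(String[] args) {
            String a = "FS";              // interned literal
            String b = "FS";              // refers to the same interned object
            String c = new String("FS");  // fresh heap copy of the literal

            System.out.println(a == b);       // true: one shared instance
            System.out.println(a == c);       // false: distinct allocation
            System.out.println(a.equals(c));  // true: identical contents
        }
    }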
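
Similarly, the boxed-primitive constructors replaced in GroupByOperator, TaskFactory, and ASTNode always allocate, while the valueOf factories may return cached instances: the JLS guarantees Integer.valueOf caches at least the values -128..127, and Boolean.valueOf returns the canonical TRUE/FALSE. A sketch of the observable difference; the class name is made up for illustration:

    public class ValueOfDemo {
        public static void main(String[] args) {
            Integer x = Integer.valueOf(0);  // served from the integer cache
            Integer y = Integer.valueOf(0);  // same cached instance
            Integer z = new Integer(0);      // unconditional new allocation

            System.out.println(x == y);  // true for values in the cached range
            System.out.println(x == z);  // false: z is a separate object
        }
    }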
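
The BaseSemanticAnalyzer hunks replace += concatenation inside a loop with a StringBuilder: each += copies the entire accumulated string into a new object, so building an n-part type string costs O(n^2) character copies, whereas append is amortized O(n). A self-contained sketch of the same before/after pattern; the method names and sample struct syntax are made up for illustration:

    public class ConcatDemo {
        // The old pattern: the accumulated string is recopied every iteration.
        static String withConcat(String[] parts) {
            String s = "struct<";
            for (int i = 0; i < parts.length; i++) {
                s += parts[i];
                if (i < parts.length - 1) {
                    s += ",";
                }
            }
            return s + ">";
        }

        // The patched pattern: one mutable buffer, appended in place.
        static String withBuilder(String[] parts) {
            StringBuilder buffer = new StringBuilder("struct<");
            for (int i = 0; i < parts.length; i++) {
                buffer.append(parts[i]);
                if (i < parts.length - 1) {
                    buffer.append(",");
                }
            }
            return buffer.append(">").toString();
        }

        public static void main(String[] args) {
            String[] fields = {"a:int", "b:string"};
            System.out.println(withConcat(fields));   // struct<a:int,b:string>
            System.out.println(withBuilder(fields));  // struct<a:int,b:string>
        }
    }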
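
Finally, the DynamicSerDe* hunks promote instance-level "private final int" fields to "private static final": the non-static form reserves a slot in every object and is reinitialized on every construction, while the static form is a single class-wide compile-time constant. A sketch; the class is hypothetical:

    class FieldSlots {
        // One int slot per instance, written each time a FieldSlots is constructed.
        private final int PER_INSTANCE = 2;

        // One copy per class, foldable as a compile-time constant.
        private static final int PER_CLASS = 2;
    }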