Index: hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
===================================================================
--- hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java (revision 903901)
+++ hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java (working copy)
@@ -22,7 +22,6 @@
 import java.io.File;
 import java.io.FileReader;
 import java.util.ArrayList;
-import java.util.Vector;
 
 import junit.framework.TestCase;
 
@@ -110,7 +109,7 @@
     zero1.add(0);
     assertEquals(zero, searchItem.getQueryRet());
 
-    Vector<Vector<String>> searchBlockRes = searchItem.getResultBucket();
+    ArrayList<ArrayList<String>> searchBlockRes = searchItem.getResultBucket();
 
     String resLine = searchBlockRes.get(0).get(0);
     assertEquals(true, resLine.contains("key"));
Index: hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
===================================================================
--- hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java (revision 903901)
+++ hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java (working copy)
@@ -21,7 +21,7 @@
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import java.util.Vector;
+import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -126,8 +126,8 @@
    * 
    * @return A set of SessionItems this framework manages
    */
-  public Vector<HWISessionItem> findAllSessionItems() {
-    Vector<HWISessionItem> otherItems = new Vector<HWISessionItem>();
+  public ArrayList<HWISessionItem> findAllSessionItems() {
+    ArrayList<HWISessionItem> otherItems = new ArrayList<HWISessionItem>();
     for (HWIAuth a : items.keySet()) {
       otherItems.addAll(items.get(a));
     }
Index: hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java
===================================================================
--- hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java (revision 903901)
+++ hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java (working copy)
@@ -84,4 +84,4 @@
     return true;
   }
 
-}
\ No newline at end of file
+}
Index: hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
===================================================================
--- hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java (revision 903901)
+++ hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java (working copy)
@@ -24,7 +24,6 @@
 import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -74,7 +73,7 @@
   * The results from the Driver. This is used for storing the most result
   * results from the driver in memory
   */
-  private Vector<Vector<String>> resultBucket;
+  private ArrayList<ArrayList<String>> resultBucket;
 
   /** Limits the resultBucket to be no greater then this size */
   private int resultBucketMaxSize;
@@ -109,7 +108,7 @@
     status = WebSessionItemStatus.NEW;
     queries = new ArrayList<String>();
     queryRet = new ArrayList<Integer>();
-    resultBucket = new Vector<Vector<String>>();
+    resultBucket = new ArrayList<ArrayList<String>>();
     resultBucketMaxSize = 1000;
     runnable = new Thread(this);
     runnable.start();
@@ -189,7 +188,7 @@
     throwIfRunning();
     queries = new ArrayList<String>();
     queryRet = new ArrayList<Integer>();
-    resultBucket = new Vector<Vector<String>>();
+    resultBucket = new ArrayList<ArrayList<String>>();
     resultFile = null;
     errorFile = null;
     // this.conf = null;
@@ -332,7 +331,7 @@
       if (proc instanceof Driver) {
         Driver qp = (Driver) proc;
         queryRet.add(new Integer(qp.run(cmd)));
-        Vector<String> res = new Vector<String>();
+        ArrayList<String> res = new ArrayList<String>();
         try {
           while (qp.getResults(res)) {
             resultBucket.add(res);
@@ -558,7 +557,7 @@
   }
 
   /** gets the value for resultBucket */
-  public Vector<Vector<String>> getResultBucket() {
+  public ArrayList<ArrayList<String>> getResultBucket() {
     return resultBucket;
   }
Index: hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
===================================================================
--- hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java (revision 903901)
+++ hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java (working copy)
@@ -72,7 +72,7 @@
      * setting these as a system property we avoid having to specifically pass
      * them
      */
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (String arg : args) {
       sb.append(arg + " ");
     }
Index: service/src/java/org/apache/hadoop/hive/service/HiveServer.java
===================================================================
--- service/src/java/org/apache/hadoop/hive/service/HiveServer.java (revision 903901)
+++ service/src/java/org/apache/hadoop/hive/service/HiveServer.java (working copy)
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.service;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -229,7 +229,7 @@
         return "";
       }
 
-      Vector<String> result = new Vector<String>();
+      ArrayList<String> result = new ArrayList<String>();
       driver.setMaxRows(1);
       try {
         if (driver.getResults(result)) {
@@ -267,10 +267,10 @@
       }
       if (!isHiveQuery) {
         // Return no results if the last command was not a Hive query
-        return new Vector<String>();
+        return new ArrayList<String>();
       }
 
-      Vector<String> result = new Vector<String>();
+      ArrayList<String> result = new ArrayList<String>();
       driver.setMaxRows(numRows);
       try {
         driver.getResults(result);
@@ -295,11 +295,11 @@
     public List<String> fetchAll() throws HiveServerException, TException {
       if (!isHiveQuery) {
         // Return no results if the last command was not a Hive query
-        return new Vector<String>();
+        return new ArrayList<String>();
       }
 
-      Vector<String> rows = new Vector<String>();
-      Vector<String> result = new Vector<String>();
+      ArrayList<String> rows = new ArrayList<String>();
+      ArrayList<String> result = new ArrayList<String>();
       try {
         while (driver.getResults(result)) {
           rows.addAll(result);
Index: serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java (working copy)
@@ -185,7 +185,7 @@
   /** {@inheritDoc} */
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer(3 * length);
+    StringBuilder sb = new StringBuilder(3 * length);
     for (int idx = start; idx < length; idx++) {
       // if not the first, put a blank separator in
       if (idx != 0) {
@@ -245,4 +245,4 @@
   public int getStart() {
     return start;
   }
-}
\ No newline at end of file
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java (working copy)
@@ -331,4 +331,4 @@
     }
     return mapSize;
   }
-}
\ No newline at end of file
+}
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java (working copy)
@@ -90,7 +90,7 @@
     if (!specialConstructor) {
       return super.getMessage();
     }
-    StringBuffer expected = new StringBuffer();
+    StringBuilder expected = new StringBuilder();
     int maxSize = 0;
     for (int[] expectedTokenSequence : expectedTokenSequences) {
       if (maxSize < expectedTokenSequence.length) {
@@ -139,7 +139,7 @@
    * version cannot be used as part of an ASCII string literal.
    */
   protected String add_escapes(String str) {
-    StringBuffer retval = new StringBuffer();
+    StringBuilder retval = new StringBuilder();
     char ch;
     for (int i = 0; i < str.length(); i++) {
       switch (str.charAt(i)) {
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java (working copy)
@@ -275,7 +275,7 @@
 
   @Override
   public String toString() {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     String prefix = "";
     for (DynamicSerDeField t : getChildren()) {
       result.append(prefix + t.fieldid + ":"
Index: serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java (working copy)
@@ -37,7 +37,7 @@
    * equivalents in the given string
    */
   protected static final String addEscapes(String str) {
-    StringBuffer retval = new StringBuffer();
+    StringBuilder retval = new StringBuilder();
     char ch;
     for (int i = 0; i < str.length(); i++) {
       switch (str.charAt(i)) {
Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy)
@@ -298,7 +298,7 @@
           + getObjectInspectorName(moi.getMapValueObjectInspector()) + ">";
     }
     case STRUCT: {
-      StringBuffer result = new StringBuffer();
+      StringBuilder result = new StringBuilder();
       result.append(oi.getClass().getSimpleName() + "<");
       StructObjectInspector soi = (StructObjectInspector) oi;
       List<? extends StructField> fields = soi.getAllStructFieldRefs();
Index: serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java (revision 903901)
+++ serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java (working copy)
@@ -247,7 +247,7 @@
     }
 
     public String nextToken() throws EOFException {
-      StringBuffer ret = null;
+      StringBuilder ret = null;
       boolean done = false;
 
       if (tokenizer == null) {
@@ -267,7 +267,7 @@
         if (nextToken.equals(separator)) {
           done = true;
         } else if (ret == null) {
-          ret = new StringBuffer(nextToken);
+          ret = new StringBuilder(nextToken);
         } else {
           ret.append(nextToken);
         }
Index: metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
===================================================================
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (revision 903901)
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (working copy)
@@ -757,4 +757,4 @@
     }
     assert (threwException);
   }
-}
\ No newline at end of file
+}
Index: metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (revision 903901)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (working copy)
@@ -225,7 +225,7 @@
 
   public static String makePartName(Map<String, String> spec)
       throws MetaException {
-    StringBuffer suffixBuf = new StringBuffer();
+    StringBuilder suffixBuf = new StringBuilder();
     for (Entry<String, String> e : spec.entrySet()) {
       if (e.getValue() == null || e.getValue().length() == 0) {
         throw new MetaException("Partition spec is incorrect. " + spec);
Index: cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
===================================================================
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (revision 903901)
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (working copy)
@@ -26,11 +26,11 @@
 import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Vector;
 
 import jline.ArgumentCompletor;
 import jline.ConsoleReader;
@@ -143,7 +143,7 @@
         return ret;
       }
 
-      Vector<String> res = new Vector<String>();
+      ArrayList<String> res = new ArrayList<String>();
       try {
         while (qp.getResults(res)) {
           for (String r : res) {
@@ -211,7 +211,7 @@
 
   public int processReader(BufferedReader r) throws IOException {
     String line;
-    StringBuffer qsb = new StringBuffer();
+    StringBuilder qsb = new StringBuilder();
 
     while ((line = r.readLine()) != null) {
       qsb.append(line + "\n");
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (revision 903901)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (working copy)
@@ -36,4 +36,4 @@
     result.set(s.toString().length());
     return result;
   }
-}
\ No newline at end of file
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (revision 903901)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (working copy)
@@ -224,7 +224,7 @@
     FileInputStream fis = new FileInputStream(qf);
     BufferedInputStream bis = new BufferedInputStream(fis);
     DataInputStream dis = new DataInputStream(bis);
-    StringBuffer qsb = new StringBuffer();
+    StringBuilder qsb = new StringBuilder();
 
     // Read the entire query
     while (dis.available() != 0) {
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (working copy)
@@ -27,7 +27,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -311,9 +310,9 @@
     return getTTable().getParameters().get(name);
   }
 
-  public Vector<StructField> getFields() {
+  public ArrayList<StructField> getFields() {
 
-    Vector<StructField> fields = new Vector<StructField>();
+    ArrayList<StructField> fields = new ArrayList<StructField>();
     try {
       Deserializer decoder = getDeserializer();
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy)
@@ -27,7 +27,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
-import java.util.Vector;
 
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
@@ -312,8 +311,8 @@
       if (cols.size() < originalOutputColumnNames.size()) {
         ArrayList<ExprNodeDesc> newColList = new ArrayList<ExprNodeDesc>();
         ArrayList<String> newOutputColumnNames = new ArrayList<String>();
-        Vector<ColumnInfo> rs_oldsignature = op.getSchema().getSignature();
-        Vector<ColumnInfo> rs_newsignature = new Vector<ColumnInfo>();
+        ArrayList<ColumnInfo> rs_oldsignature = op.getSchema().getSignature();
+        ArrayList<ColumnInfo> rs_newsignature = new ArrayList<ColumnInfo>();
         RowResolver old_rr = cppCtx.getOpToParseCtxMap().get(op).getRR();
         RowResolver new_rr = new RowResolver();
         for (String col : cols) {
@@ -397,7 +396,7 @@
       ReduceSinkDesc reduceConf = reduce.getConf();
       Map<String, ExprNodeDesc> oldMap = reduce.getColumnExprMap();
       Map<String, ExprNodeDesc> newMap = new HashMap<String, ExprNodeDesc>();
-      Vector<ColumnInfo> sig = new Vector<ColumnInfo>();
+      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();
       RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRR();
       RowResolver newRR = new RowResolver();
       ArrayList<String> originalValueOutputColNames = reduceConf
@@ -515,7 +514,7 @@
       RowResolver joinRR = cppCtx.getOpToParseCtxMap().get(op).getRR();
       RowResolver newJoinRR = new RowResolver();
       ArrayList<String> outputCols = new ArrayList<String>();
-      Vector<ColumnInfo> rs = new Vector<ColumnInfo>();
+      ArrayList<ColumnInfo> rs = new ArrayList<ColumnInfo>();
       Map<String, ExprNodeDesc> newColExprMap = new HashMap<String, ExprNodeDesc>();
 
       for (int i = 0; i < conf.getOutputColumnNames().size(); i++) {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy)
@@ -176,8 +176,8 @@
 
     Operator extract = OperatorFactory.getAndMakeChild(new ExtractDesc(
         new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
-        Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
-        out_rwsch.getColumnInfos()));
+        Utilities.ReduceField.VALUE.toString(), "", false)),
+        new RowSchema(out_rwsch.getColumnInfos()));
 
     TableDesc ts = (TableDesc) fsConf.getTableInfo().clone();
     fsConf
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (working copy)
@@ -232,7 +232,7 @@
     MapredWork clonePlan = null;
     try {
       String xmlPlan = currPlan.toXML();
-      StringBuffer sb = new StringBuffer(xmlPlan);
+      StringBuilder sb = new StringBuilder(xmlPlan);
       ByteArrayInputStream bis;
       bis = new ByteArrayInputStream(sb.toString().getBytes("UTF-8"));
       clonePlan = Utilities.deserializeMapRedWork(bis, parseCtx.getConf());
@@ -377,4 +377,4 @@
         + UNDERLINE + srcTblBigTbl + UNDERLINE + srcTblSmallTbl;
   }
 
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (working copy)
@@ -20,8 +20,8 @@
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.Properties;
-import java.util.Vector;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -104,7 +104,7 @@
   }
 
   @Override
-  public boolean fetch(Vector<String> res) throws IOException {
+  public boolean fetch(ArrayList<String> res) throws IOException {
     try {
       int numRows = 0;
       int rowsRet = maxRows;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (working copy)
@@ -43,4 +43,4 @@
   Class<? extends UDAFEvaluator> getEvaluatorClass(List<TypeInfo> argClasses)
       throws AmbiguousMethodException;
 
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (working copy)
@@ -24,7 +24,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -138,7 +137,7 @@
   }
 
   // dummy method - FetchTask overwrites this
-  public boolean fetch(Vector<String> res) throws IOException {
+  public boolean fetch(ArrayList<String> res) throws IOException {
     assert false;
     return false;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (working copy)
@@ -213,7 +213,7 @@
   }
 
   @Override
-  protected void fatalErrorMessage(StringBuffer errMsg, long counterCode) {
+  protected void fatalErrorMessage(StringBuilder errMsg, long counterCode) {
     errMsg.append("Operator " + getOperatorId() + " (id=" + id + "): "
         + fatalErrMsg[(int) counterCode]);
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (working copy)
@@ -86,7 +86,7 @@
    * addJobConfToEnvironment is shamelessly copied from hadoop streaming.
    */
   static String safeEnvVarName(String var) {
-    StringBuffer safe = new StringBuffer();
+    StringBuilder safe = new StringBuilder();
     int len = var.length();
     for (int i = 0; i < len; i++) {
       char c = var.charAt(i);
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java (working copy)
@@ -19,29 +19,28 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.Serializable;
-import java.util.Vector;
+import java.util.ArrayList;
 
 /**
  * RowSchema Implementation
- **/
-
+ */
 public class RowSchema implements Serializable {
   private static final long serialVersionUID = 1L;
-  private Vector<ColumnInfo> signature;
+  private ArrayList<ColumnInfo> signature;
 
   public RowSchema() {
   }
 
-  public RowSchema(Vector<ColumnInfo> signature) {
+  public RowSchema(ArrayList<ColumnInfo> signature) {
     this.signature = signature;
   }
 
-  public void setSignature(Vector<ColumnInfo> signature) {
+  public void setSignature(ArrayList<ColumnInfo> signature) {
     this.signature = signature;
   }
 
-  public Vector<ColumnInfo> getSignature() {
+  public ArrayList<ColumnInfo> getSignature() {
     return signature;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy)
@@ -25,7 +25,6 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -129,13 +128,13 @@
   /**
    * Implements the getChildren function for the Node Interface.
   */
-  public Vector<Node> getChildren() {
+  public ArrayList<Node> getChildren() {
 
     if (getChildOperators() == null) {
       return null;
     }
 
-    Vector<Node> ret_vec = new Vector<Node>();
+    ArrayList<Node> ret_vec = new ArrayList<Node>();
     for (Operator<? extends Serializable> op : getChildOperators()) {
       ret_vec.add(op);
     }
@@ -935,7 +934,7 @@
    * 
    * @param ctrs
   */
-  public boolean checkFatalErrors(Counters ctrs, StringBuffer errMsg) {
+  public boolean checkFatalErrors(Counters ctrs, StringBuilder errMsg) {
     if (counterNameToEnum == null) {
       return false;
     }
@@ -976,7 +975,7 @@
    * @param counterValue
    *          input counter code.
   */
-  protected void fatalErrorMessage(StringBuffer errMsg, long counterValue) {
+  protected void fatalErrorMessage(StringBuilder errMsg, long counterValue) {
   }
 
   // A given query can have multiple map-reduce jobs
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (working copy)
@@ -272,7 +272,7 @@
    * @return true if fatal errors happened during job execution, false
    *         otherwise.
    */
-  protected boolean checkFatalErrors(TaskHandle t, StringBuffer errMsg) {
+  protected boolean checkFatalErrors(TaskHandle t, StringBuilder errMsg) {
     ExecDriverTaskHandle th = (ExecDriverTaskHandle) t;
     RunningJob rj = th.getRunningJob();
     try {
@@ -300,7 +300,7 @@
     long reportTime = System.currentTimeMillis();
     long maxReportInterval = 60 * 1000; // One minute
     boolean fatal = false;
-    StringBuffer errMsg = new StringBuffer();
+    StringBuilder errMsg = new StringBuilder();
     while (!rj.isComplete()) {
       try {
         Thread.sleep(1000);
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy)
@@ -409,7 +409,7 @@
       return "";
     }
 
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append(indent);
     sb.append(op.toString());
     sb.append("\n");
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (working copy)
@@ -348,8 +348,8 @@
     for (int tag = 0; tag < exprs.size(); tag++) {
       List<ExprNodeDesc> valueCols = exprs.get((byte) tag);
       int columnSize = valueCols.size();
-      StringBuffer colNames = new StringBuffer();
-      StringBuffer colTypes = new StringBuffer();
+      StringBuilder colNames = new StringBuilder();
+      StringBuilder colTypes = new StringBuilder();
       if (columnSize <= 0) {
         continue;
       }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java (working copy)
@@ -108,4 +108,4 @@
     this.partSpec = partSpec;
   }
 
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (working copy)
@@ -25,7 +25,6 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Properties;
-import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -338,7 +337,7 @@
    */
   public static List<FieldSchema> getFieldSchemasFromRowSchema(RowSchema row,
       String fieldPrefix) {
-    Vector<ColumnInfo> c = row.getSignature();
+    ArrayList<ColumnInfo> c = row.getSignature();
     return getFieldSchemasFromColumnInfo(c, fieldPrefix);
   }
 
@@ -346,7 +345,7 @@
    * Convert the ColumnInfo to FieldSchema.
    */
   public static List<FieldSchema> getFieldSchemasFromColumnInfo(
-      Vector<ColumnInfo> cols, String fieldPrefix) {
+      ArrayList<ColumnInfo> cols, String fieldPrefix) {
     if ((cols == null) || (cols.size() == 0)) {
       return new ArrayList<FieldSchema>();
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java (working copy)
@@ -106,7 +106,7 @@
     BufferedReader reader = new BufferedReader(new InputStreamReader(fi));
     try {
       String line = null;
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       while ((line = reader.readLine()) != null) {
         buf.append(line);
         // if it does not end with " then it is line continuation
@@ -114,7 +114,7 @@
           continue;
         }
         parseLine(buf.toString(), l);
-        buf = new StringBuffer();
+        buf = new StringBuilder();
       }
     } finally {
       try {
@@ -229,7 +229,7 @@
       return;
     }
 
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append(rt.name());
 
     for (Map.Entry<String, String> ent : keyValMap.entrySet()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Node.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/Node.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/Node.java (working copy)
@@ -30,7 +30,7 @@
    * Gets the vector of children nodes. This is used in the graph walker
    * algorithms.
    * 
-   * @return Vector
+   * @return List
   */
  public List<? extends Node> getChildren();
Index: ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (working copy)
@@ -41,4 +41,4 @@
   public void startWalking(Collection<Node> startNodes,
       HashMap<Node, Object> nodeOutput) throws SemanticException;
 
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (working copy)
@@ -187,7 +187,7 @@
      */
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       sb.append(inputSplitShim.toString());
       sb.append("InputFormatClass: " + inputFormatClassName);
       sb.append("\n");
Index: ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java (working copy)
@@ -293,7 +293,7 @@
    */
   @Deprecated
   public final String readLine() throws IOException {
-    StringBuffer line = new StringBuffer(80); // Typical line length
+    StringBuilder line = new StringBuilder(80); // Typical line length
     boolean foundTerminator = false;
     while (true) {
       int nextByte = in.read();
Index: ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java (working copy)
@@ -145,4 +145,4 @@
       }
     };
   }
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (working copy)
@@ -21,7 +21,6 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Vector;
 import java.util.Map.Entry;
 
 /**
@@ -44,10 +43,10 @@
   private final HashMap<String, ArrayList<ASTNode>> rhsSemijoin;
 
   // join conditions
-  private Vector<Vector<ASTNode>> expressions;
+  private ArrayList<ArrayList<ASTNode>> expressions;
 
   // filters
-  private Vector<Vector<ASTNode>> filters;
+  private ArrayList<ArrayList<ASTNode>> filters;
 
   // user asked for map-side join
   private boolean mapSideJoin;
@@ -101,11 +100,11 @@
     this.leftAliases = leftAliases;
   }
 
-  public Vector<Vector<ASTNode>> getExpressions() {
+  public ArrayList<ArrayList<ASTNode>> getExpressions() {
     return expressions;
   }
 
-  public void setExpressions(Vector<Vector<ASTNode>> expressions) {
+  public void setExpressions(ArrayList<ArrayList<ASTNode>> expressions) {
     this.expressions = expressions;
   }
 
@@ -160,7 +159,7 @@
   /**
    * @return the filters
   */
-  public Vector<Vector<ASTNode>> getFilters() {
+  public ArrayList<ArrayList<ASTNode>> getFilters() {
     return filters;
   }
 
@@ -168,7 +167,7 @@
    * @param filters
    *          the filters to set
   */
-  public void setFilters(Vector<Vector<ASTNode>> filters) {
+  public void setFilters(ArrayList<ArrayList<ASTNode>> filters) {
     this.filters = filters;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java (working copy)
@@ -101,7 +101,7 @@
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append(getName());
     sb.append("(");
     boolean isfirst = true;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (working copy)
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.Vector;
+import java.util.ArrayList;
 
 import org.antlr.runtime.Token;
 import org.antlr.runtime.tree.CommonTree;
@@ -50,12 +50,12 @@
    * 
    * @see org.apache.hadoop.hive.ql.lib.Node#getChildren()
   */
-  public Vector<Node> getChildren() {
+  public ArrayList<Node> getChildren() {
     if (super.getChildCount() == 0) {
       return null;
     }
 
-    Vector<Node> ret_vec = new Vector<Node>();
+    ArrayList<Node> ret_vec = new ArrayList<Node>();
     for (int i = 0; i < super.getChildCount(); ++i) {
       ret_vec.add((Node) super.getChild(i));
     }
@@ -89,11 +89,11 @@
   }
 
   public String dump() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append('(');
     sb.append(toString());
-    Vector<Node> children = getChildren();
+    ArrayList<Node> children = getChildren();
     if (children != null) {
       for (Node node : getChildren()) {
         if (node instanceof ASTNode) {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -28,7 +28,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.Vector;
 import java.util.Map.Entry;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
@@ -865,7 +864,7 @@
 
   @SuppressWarnings("nls")
   private void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
-      Vector<String> leftAliases, Vector<String> rightAliases,
+      ArrayList<String> leftAliases, ArrayList<String> rightAliases,
       ArrayList<String> fields) throws SemanticException {
     // String[] allAliases = joinTree.getAllAliases();
     switch (condn.getToken().getType()) {
@@ -957,9 +956,9 @@
     }
   }
 
-  private void populateAliases(Vector<String> leftAliases,
-      Vector<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
-      Vector<String> leftSrc) throws SemanticException {
+  private void populateAliases(ArrayList<String> leftAliases,
+      ArrayList<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
+      ArrayList<String> leftSrc) throws SemanticException {
     if ((leftAliases.size() != 0) && (rightAliases.size() != 0)) {
       throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
           .getMsg(condn));
@@ -1001,7 +1000,7 @@
    * @throws SemanticException
   */
   private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond,
-      Vector<String> leftSrc) throws SemanticException {
+      ArrayList<String> leftSrc) throws SemanticException {
     if (joinCond == null) {
       return;
     }
@@ -1018,14 +1017,14 @@
 
     case HiveParser.EQUAL:
       ASTNode leftCondn = (ASTNode) joinCond.getChild(0);
-      Vector<String> leftCondAl1 = new Vector<String>();
-      Vector<String> leftCondAl2 = new Vector<String>();
+      ArrayList<String> leftCondAl1 = new ArrayList<String>();
+      ArrayList<String> leftCondAl2 = new ArrayList<String>();
       parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
           null);
 
       ASTNode rightCondn = (ASTNode) joinCond.getChild(1);
-      Vector<String> rightCondAl1 = new Vector<String>();
-      Vector<String> rightCondAl2 = new Vector<String>();
+      ArrayList<String> rightCondAl1 = new ArrayList<String>();
+      ArrayList<String> rightCondAl2 = new ArrayList<String>();
       parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
           rightCondAl2, null);
 
@@ -1069,13 +1068,13 @@
 
       // Create all children
       int childrenBegin = (isFunction ? 1 : 0);
-      ArrayList<Vector<String>> leftAlias = new ArrayList<Vector<String>>(
+      ArrayList<ArrayList<String>> leftAlias = new ArrayList<ArrayList<String>>(
           joinCond.getChildCount() - childrenBegin);
-      ArrayList<Vector<String>> rightAlias = new ArrayList<Vector<String>>(
+      ArrayList<ArrayList<String>> rightAlias = new ArrayList<ArrayList<String>>(
          joinCond.getChildCount() - childrenBegin);
       for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
-        Vector<String> left = new Vector<String>();
-        Vector<String> right = new Vector<String>();
+        ArrayList<String> left = new ArrayList<String>();
+        ArrayList<String> right = new ArrayList<String>();
         leftAlias.add(left);
         rightAlias.add(right);
       }
@@ -1087,7 +1086,7 @@
       }
 
       boolean leftAliasNull = true;
-      for (Vector<String> left : leftAlias) {
+      for (ArrayList<String> left : leftAlias) {
         if (left.size() != 0) {
           leftAliasNull = false;
           break;
@@ -1095,7 +1094,7 @@
       }
 
       boolean rightAliasNull = true;
-      for (Vector<String> right : rightAlias) {
+      for (ArrayList<String> right : rightAlias) {
         if (right.size() != 0) {
           rightAliasNull = false;
           break;
@@ -1433,7 +1432,7 @@
 
     StringBuilder inpColumns = new StringBuilder();
     StringBuilder inpColumnTypes = new StringBuilder();
-    Vector<ColumnInfo> inputSchema = opParseCtx.get(input).getRR()
+    ArrayList<ColumnInfo> inputSchema = opParseCtx.get(input).getRR()
         .getColumnInfos();
     for (int i = 0; i < inputSchema.size(); ++i) {
       if (i != 0) {
@@ -3046,7 +3045,7 @@
     }
     String cols = new String();
     String colTypes = new String();
-    Vector<ColumnInfo> colInfos = inputRR.getColumnInfos();
+    ArrayList<ColumnInfo> colInfos = inputRR.getColumnInfos();
 
     // CTAS case: the file output format and serde are defined by the create
     // table command
@@ -3138,7 +3137,7 @@
       input = genConversionSelectOperator(dest, qb, input, table_desc);
       inputRR = opParseCtx.get(input).getRR();
 
-      Vector<ColumnInfo> vecCol = new Vector<ColumnInfo>();
+      ArrayList<ColumnInfo> vecCol = new ArrayList<ColumnInfo>();
 
       try {
         StructObjectInspector rowObjectInspector = (StructObjectInspector) table_desc
@@ -3185,7 +3184,7 @@
 
     // Check column number
     List<? extends StructField> tableFields = oi.getAllStructFieldRefs();
-    Vector<ColumnInfo> rowFields = opParseCtx.get(input).getRR()
+    ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRR()
        .getColumnInfos();
     if (tableFields.size() != rowFields.size()) {
       String reason = "Table " + dest + " has " + tableFields.size()
@@ -3314,7 +3313,7 @@
     // resulting output object inspector can be used to make the RowResolver
     // for the UDTF operator
     RowResolver selectRR = opParseCtx.get(input).getRR();
-    Vector<ColumnInfo> inputCols = selectRR.getColumnInfos();
+    ArrayList<ColumnInfo> inputCols = selectRR.getColumnInfos();
 
     // Create the object inspector for the input columns and initialize the UDTF
     ArrayList<String> colNames = new ArrayList<String>();
@@ -3586,7 +3585,7 @@
     ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
 
     // Compute join keys and store in reduceKeys
-    Vector<ASTNode> exprs = joinTree.getExpressions().get(pos);
+    ArrayList<ASTNode> exprs = joinTree.getExpressions().get(pos);
     for (int i = 0; i < exprs.size(); i++) {
       ASTNode expr = exprs.get(i);
       reduceKeys.add(genExprNodeDesc(expr, inputRS));
@@ -3646,7 +3645,7 @@
     Operator joinSrcOp = null;
     if (leftChild != null) {
       Operator joinOp = genJoinOperator(qb, leftChild, map);
-      Vector<ASTNode> filter = joinTree.getFilters().get(0);
+      ArrayList<ASTNode> filter = joinTree.getFilters().get(0);
       for (ASTNode cond : filter) {
         joinOp = genFilterPlan(qb, cond, joinOp);
       }
@@ -3850,7 +3849,7 @@
   */
   private void pushJoinFilters(QB qb, QBJoinTree joinTree,
       HashMap<String, Operator> map) throws SemanticException {
-    Vector<Vector<ASTNode>> filters = joinTree.getFilters();
+    ArrayList<ArrayList<ASTNode>> filters = joinTree.getFilters();
     if (joinTree.getJoinSrc() != null) {
       pushJoinFilters(qb, joinTree.getJoinSrc(), map);
     }
@@ -3859,7 +3858,7 @@
     for (String src : joinTree.getBaseSrc()) {
       if (src != null) {
         Operator srcOp = map.get(src);
-        Vector<ASTNode> filter = filters.get(pos);
+        ArrayList<ASTNode> filter = filters.get(pos);
         for (ASTNode cond : filter) {
           srcOp = genFilterPlan(qb, cond, srcOp);
         }
@@ -3895,14 +3894,14 @@
 
     QBJoinTree joinTree = new QBJoinTree();
     joinTree.setNoOuterJoin(false);
-    joinTree.setExpressions(new Vector<Vector<ASTNode>>());
-    joinTree.setFilters(new Vector<Vector<ASTNode>>());
+    joinTree.setExpressions(new ArrayList<ArrayList<ASTNode>>());
+    joinTree.setFilters(new ArrayList<ArrayList<ASTNode>>());
 
     // Create joinTree structures to fill them up later
-    Vector<String> rightAliases = new Vector<String>();
-    Vector<String> leftAliases = new Vector<String>();
-    Vector<String> baseSrc = new Vector<String>();
-    Vector<String> preserved = new Vector<String>();
+    ArrayList<String> rightAliases = new ArrayList<String>();
+    ArrayList<String> leftAliases = new ArrayList<String>();
+    ArrayList<String> baseSrc = new ArrayList<String>();
+    ArrayList<String> preserved = new ArrayList<String>();
 
     boolean lastPreserved = false;
     int cols = -1;
@@ -3941,8 +3940,8 @@
             + "number of keys in UNIQUEJOIN");
       }
 
-      Vector<ASTNode> expressions = new Vector<ASTNode>();
-      Vector<ASTNode> filt = new Vector<ASTNode>();
+      ArrayList<ASTNode> expressions = new ArrayList<ASTNode>();
+      ArrayList<ASTNode> filt = new ArrayList<ASTNode>();
 
       for (Node exp : child.getChildren()) {
         expressions.add((ASTNode) exp);
@@ -4065,18 +4064,18 @@
       assert false;
     }
 
-    Vector<Vector<ASTNode>> expressions = new Vector<Vector<ASTNode>>();
-    expressions.add(new Vector<ASTNode>());
-    expressions.add(new Vector<ASTNode>());
+    ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
+    expressions.add(new ArrayList<ASTNode>());
+    expressions.add(new ArrayList<ASTNode>());
     joinTree.setExpressions(expressions);
 
-    Vector<Vector<ASTNode>> filters = new Vector<Vector<ASTNode>>();
-    filters.add(new Vector<ASTNode>());
-    filters.add(new Vector<ASTNode>());
+    ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
+    filters.add(new ArrayList<ASTNode>());
+    filters.add(new ArrayList<ASTNode>());
     joinTree.setFilters(filters);
 
     ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
-    Vector<String> leftSrc = new Vector<String>();
+    ArrayList<String> leftSrc = new ArrayList<String>();
     parseJoinCondition(joinTree, joinCond, leftSrc);
     if (leftSrc.size() == 1) {
       joinTree.setLeftAlias(leftSrc.get(0));
@@ -4164,18 +4163,18 @@
     }
     target.setBaseSrc(baseSrc);
 
-    Vector<Vector<ASTNode>> expr = target.getExpressions();
+    ArrayList<ArrayList<ASTNode>> expr = target.getExpressions();
     for (int i = 0; i < nodeRightAliases.length; i++) {
       expr.add(node.getExpressions().get(i + 1));
     }
 
-    Vector<Vector<ASTNode>> filter = target.getFilters();
+    ArrayList<ArrayList<ASTNode>> filter = target.getFilters();
     for (int i = 0; i < nodeRightAliases.length; i++) {
       filter.add(node.getFilters().get(i + 1));
     }
 
     if (node.getFilters().get(0).size() != 0) {
-      Vector<ASTNode> filterPos = filter.get(pos);
+      ArrayList<ASTNode> filterPos = filter.get(pos);
       filterPos.addAll(node.getFilters().get(0));
     }
 
@@ -4239,8 +4238,8 @@
      return -1;
    }
 
-    Vector<ASTNode> nodeCondn = node.getExpressions().get(0);
-    Vector<ASTNode> targetCondn = null;
+    ArrayList<ASTNode> nodeCondn = node.getExpressions().get(0);
+    ArrayList<ASTNode> targetCondn = null;
 
     if (leftAlias.equals(target.getLeftAlias())) {
       targetCondn = target.getExpressions().get(0);
@@ -4308,7 +4307,7 @@
       throws SemanticException {
     OpParseContext inputCtx = opParseCtx.get(input);
     RowResolver inputRR = inputCtx.getRR();
-    Vector<ColumnInfo> columns = inputRR.getColumnInfos();
+    ArrayList<ColumnInfo> columns = inputRR.getColumnInfos();
     ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
     ArrayList<String> columnNames = new ArrayList<String>();
     for (int i = 0; i < columns.size(); i++) {
@@ -5148,8 +5147,7 @@
   */
   private void LVmergeRowResolvers(RowResolver source, RowResolver dest,
       ArrayList<String> outputInternalColNames) {
-    Vector<ColumnInfo> cols = source.getColumnInfos();
-    for (ColumnInfo c : cols) {
+    for (ColumnInfo c : source.getColumnInfos()) {
       String internalName = getColumnInternalName(outputInternalColNames.size());
       outputInternalColNames.add(internalName);
       ColumnInfo newCol = new ColumnInfo(internalName, c.getType(), c
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (working copy)
@@ -22,7 +22,7 @@
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.Vector;
+import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,8 +32,7 @@
 
 /**
  * Implementation of the Row Resolver
  * 
- **/
-
+ */
 public class RowResolver {
   private final RowSchema rowSchema;
@@ -60,7 +59,7 @@
     }
     col_alias = col_alias.toLowerCase();
     if (rowSchema.getSignature() == null) {
-      rowSchema.setSignature(new Vector<ColumnInfo>());
+      rowSchema.setSignature(new ArrayList<ColumnInfo>());
     }
 
     rowSchema.getSignature().add(colInfo);
@@ -133,7 +132,7 @@
     return ret;
   }
 
-  public Vector<ColumnInfo> getColumnInfos() {
+  public ArrayList<ColumnInfo> getColumnInfos() {
     return rowSchema.getSignature();
   }
 
@@ -176,7 +175,7 @@
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
 
     for (Map.Entry<String, LinkedHashMap<String, ColumnInfo>> e : rslvMap
         .entrySet()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -38,7 +38,6 @@
 import java.util.Queue;
 import java.util.Random;
 import java.util.Set;
-import java.util.Vector;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -702,7 +701,7 @@
     }
   }
 
-  public boolean getResults(Vector<String> res) throws IOException {
+  public boolean getResults(ArrayList<String> res) throws IOException {
     if (plan != null && plan.getFetchTask() != null) {
       FetchTask ft = (FetchTask) plan.getFetchTask();
       ft.setMaxRows(maxRows);
Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java (revision 903901)
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java (working copy)
@@ -190,4 +190,4 @@
     result.set(value, first, value.length - first);
     return result;
   }
-}
\ No newline at end of file
+}