diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index 7d7d4dd..9d821e6 100644 --- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -26,11 +26,11 @@ import java.io.IOException; import java.io.PrintStream; import java.io.UnsupportedEncodingException; import java.util.Arrays; +import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Vector; import jline.ArgumentCompletor; import jline.ConsoleReader; @@ -143,7 +143,7 @@ public class CliDriver { return ret; } - Vector res = new Vector(); + ArrayList res = new ArrayList(); try { while (qp.getResults(res)) { for (String r : res) { @@ -211,7 +211,7 @@ public class CliDriver { public int processReader(BufferedReader r) throws IOException { String line; - StringBuffer qsb = new StringBuffer(); + StringBuilder qsb = new StringBuilder(); while ((line = r.readLine()) != null) { qsb.append(line + "\n"); diff --git hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java index 528398a..2fb2a45 100644 --- hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java +++ hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java @@ -84,4 +84,4 @@ public class HWIAuth implements Comparable { return true; } -} \ No newline at end of file +} diff --git hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java index 9487a4d..22a8993 100644 --- hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java +++ hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java @@ -72,7 +72,7 @@ public class HWIServer { * setting these as a system property we avoid having to specifically pass * them */ - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (String arg : args) { sb.append(arg + " "); } diff --git hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java index 87036a7..6ada147 100644 --- hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java +++ hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.List; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -74,7 +73,7 @@ public class HWISessionItem implements Runnable, Comparable { * The results from the Driver. 
This is used for storing the most result * results from the driver in memory */ - private Vector> resultBucket; + private ArrayList> resultBucket; /** Limits the resultBucket to be no greater then this size */ private int resultBucketMaxSize; @@ -109,7 +108,7 @@ public class HWISessionItem implements Runnable, Comparable { status = WebSessionItemStatus.NEW; queries = new ArrayList(); queryRet = new ArrayList(); - resultBucket = new Vector>(); + resultBucket = new ArrayList>(); resultBucketMaxSize = 1000; runnable = new Thread(this); runnable.start(); @@ -189,7 +188,7 @@ public class HWISessionItem implements Runnable, Comparable { throwIfRunning(); queries = new ArrayList(); queryRet = new ArrayList(); - resultBucket = new Vector>(); + resultBucket = new ArrayList>(); resultFile = null; errorFile = null; // this.conf = null; @@ -332,7 +331,7 @@ public class HWISessionItem implements Runnable, Comparable { if (proc instanceof Driver) { Driver qp = (Driver) proc; queryRet.add(new Integer(qp.run(cmd))); - Vector res = new Vector(); + ArrayList res = new ArrayList(); try { while (qp.getResults(res)) { resultBucket.add(res); @@ -558,7 +557,7 @@ public class HWISessionItem implements Runnable, Comparable { } /** gets the value for resultBucket */ - public Vector> getResultBucket() { + public ArrayList> getResultBucket() { return resultBucket; } diff --git hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java index 764a71a..dde770d 100644 --- hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java +++ hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java @@ -21,7 +21,7 @@ import java.util.Collection; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; -import java.util.Vector; +import java.util.ArrayList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -126,8 +126,8 @@ public class HWISessionManager implements Runnable { * * @return A set of SessionItems this framework manages */ - public Vector findAllSessionItems() { - Vector otherItems = new Vector(); + public ArrayList findAllSessionItems() { + ArrayList otherItems = new ArrayList(); for (HWIAuth a : items.keySet()) { otherItems.addAll(items.get(a)); } diff --git hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java index 9893c34..781b46e 100644 --- hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java +++ hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java @@ -22,7 +22,6 @@ import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.ArrayList; -import java.util.Vector; import junit.framework.TestCase; @@ -110,7 +109,7 @@ public class TestHWISessionManager extends TestCase { zero1.add(0); assertEquals(zero, searchItem.getQueryRet()); - Vector> searchBlockRes = searchItem.getResultBucket(); + ArrayList> searchBlockRes = searchItem.getResultBucket(); String resLine = searchBlockRes.get(0).get(0); assertEquals(true, resLine.contains("key")); diff --git metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java index d60a9fd..204b0b6 100755 --- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -225,7 +225,7 @@ public class Warehouse { public static String makePartName(Map spec) throws 
MetaException { - StringBuffer suffixBuf = new StringBuffer(); + StringBuilder suffixBuf = new StringBuilder(); for (Entry e : spec.entrySet()) { if (e.getValue() == null || e.getValue().length() == 0) { throw new MetaException("Partition spec is incorrect. " + spec); diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 6443d2c..37b8713 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -757,4 +757,4 @@ public class TestHiveMetaStore extends TestCase { } assert (threwException); } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 389052d..f842604 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; -import java.util.Vector; +import java.util.ArrayList; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; @@ -676,7 +676,7 @@ public class Driver implements CommandProcessor { } } - public boolean getResults(Vector res) throws IOException { + public boolean getResults(ArrayList res) throws IOException { if (plan != null && plan.getPlan().getFetchTask() != null) { BaseSemanticAnalyzer sem = plan.getPlan(); if (!sem.getFetchTaskInit()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index beaebce..02866db 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -348,8 +348,8 @@ public abstract class CommonJoinOperator extends for (int tag = 0; tag < exprs.size(); tag++) { List valueCols = exprs.get((byte) tag); int columnSize = valueCols.size(); - StringBuffer colNames = new StringBuffer(); - StringBuffer colTypes = new StringBuffer(); + StringBuilder colNames = new StringBuilder(); + StringBuilder colTypes = new StringBuilder(); if (columnSize <= 0) { continue; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java index 821abf1..c9718b7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java @@ -272,7 +272,7 @@ public class ExecDriver extends Task implements Serializable { * @return true if fatal errors happened during job execution, false * otherwise. 
*/ - protected boolean checkFatalErrors(TaskHandle t, StringBuffer errMsg) { + protected boolean checkFatalErrors(TaskHandle t, StringBuilder errMsg) { ExecDriverTaskHandle th = (ExecDriverTaskHandle) t; RunningJob rj = th.getRunningJob(); try { @@ -300,7 +300,7 @@ public class ExecDriver extends Task implements Serializable { long reportTime = System.currentTimeMillis(); long maxReportInterval = 60 * 1000; // One minute boolean fatal = false; - StringBuffer errMsg = new StringBuffer(); + StringBuilder errMsg = new StringBuilder(); while (!rj.isComplete()) { try { Thread.sleep(1000); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index 8708b54..51d45d1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec; import java.io.IOException; import java.io.Serializable; +import java.util.ArrayList; import java.util.Properties; -import java.util.Vector; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; @@ -104,7 +104,7 @@ public class FetchTask extends Task implements Serializable { } @Override - public boolean fetch(Vector res) throws IOException { + public boolean fetch(ArrayList res) throws IOException { try { int numRows = 0; int rowsRet = maxRows; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java index 88c4668..26fa769 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java @@ -213,7 +213,7 @@ public class MapJoinOperator extends CommonJoinOperator implements } @Override - protected void fatalErrorMessage(StringBuffer errMsg, long counterCode) { + protected void fatalErrorMessage(StringBuilder errMsg, long counterCode) { errMsg.append("Operator " + getOperatorId() + " (id=" + id + "): " + fatalErrMsg[(int) counterCode]); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index ffaa460..00560f7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -25,7 +25,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -129,13 +128,13 @@ public abstract class Operator implements Serializable, /** * Implements the getChildren function for the Node Interface. */ - public Vector getChildren() { + public ArrayList getChildren() { if (getChildOperators() == null) { return null; } - Vector ret_vec = new Vector(); + ArrayList ret_vec = new ArrayList(); for (Operator op : getChildOperators()) { ret_vec.add(op); } @@ -935,7 +934,7 @@ public abstract class Operator implements Serializable, * * @param ctrs */ - public boolean checkFatalErrors(Counters ctrs, StringBuffer errMsg) { + public boolean checkFatalErrors(Counters ctrs, StringBuilder errMsg) { if (counterNameToEnum == null) { return false; } @@ -976,7 +975,7 @@ public abstract class Operator implements Serializable, * @param counterValue * input counter code. 
*/ - protected void fatalErrorMessage(StringBuffer errMsg, long counterValue) { + protected void fatalErrorMessage(StringBuilder errMsg, long counterValue) { } // A given query can have multiple map-reduce jobs diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java index db9982c..bc573d1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java @@ -19,29 +19,28 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Vector; +import java.util.ArrayList; /** * RowSchema Implementation - **/ - + */ public class RowSchema implements Serializable { private static final long serialVersionUID = 1L; - private Vector signature; + private ArrayList signature; public RowSchema() { } - public RowSchema(Vector signature) { + public RowSchema(ArrayList signature) { this.signature = signature; } - public void setSignature(Vector signature) { + public void setSignature(ArrayList signature) { this.signature = signature; } - public Vector getSignature() { + public ArrayList getSignature() { return signature; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java index 71e6387..57beb85 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java @@ -86,7 +86,7 @@ public class ScriptOperator extends Operator implements * addJobConfToEnvironment is shamelessly copied from hadoop streaming. */ static String safeEnvVarName(String var) { - StringBuffer safe = new StringBuffer(); + StringBuilder safe = new StringBuilder(); int len = var.length(); for (int i = 0; i < len; i++) { char c = var.charAt(i); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 86a6fb9..ca6f70e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -138,7 +137,7 @@ public abstract class Task implements Serializable, } // dummy method - FetchTask overwrites this - public boolean fetch(Vector res) throws IOException { + public boolean fetch(ArrayList res) throws IOException { assert false; return false; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java index e367133..6b60927 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java @@ -43,4 +43,4 @@ public interface UDAFEvaluatorResolver { Class getEvaluatorClass(List argClasses) throws AmbiguousMethodException; -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 89acc30..8305d66 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -409,7 +409,7 @@ public class Utilities { return ""; } - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(indent); 
sb.append(op.toString()); sb.append("\n"); diff --git ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java index cf1fada..af5a808 100644 --- ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java +++ ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java @@ -106,7 +106,7 @@ public class HiveHistory { BufferedReader reader = new BufferedReader(new InputStreamReader(fi)); try { String line = null; - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); while ((line = reader.readLine()) != null) { buf.append(line); // if it does not end with " then it is line continuation @@ -114,7 +114,7 @@ public class HiveHistory { continue; } parseLine(buf.toString(), l); - buf = new StringBuffer(); + buf = new StringBuilder(); } } finally { try { @@ -229,7 +229,7 @@ public class HiveHistory { return; } - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(rt.name()); for (Map.Entry ent : keyValMap.entrySet()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java index 6df1d1d..80869a9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java @@ -187,7 +187,7 @@ public class CombineHiveInputFormat startNodes, HashMap nodeOutput) throws SemanticException; -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index abfea5c..a52a96a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -27,7 +27,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Properties; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -311,9 +310,9 @@ public class Table { return getTTable().getParameters().get(name); } - public Vector getFields() { + public ArrayList getFields() { - Vector fields = new Vector(); + ArrayList fields = new ArrayList(); try { Deserializer decoder = getDeserializer(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java index 87b8d2f..7478170 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java @@ -27,7 +27,6 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; -import java.util.Vector; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; @@ -312,8 +311,8 @@ public class ColumnPrunerProcFactory { if (cols.size() < originalOutputColumnNames.size()) { ArrayList newColList = new ArrayList(); ArrayList newOutputColumnNames = new ArrayList(); - Vector rs_oldsignature = op.getSchema().getSignature(); - Vector rs_newsignature = new Vector(); + ArrayList rs_oldsignature = op.getSchema().getSignature(); + ArrayList rs_newsignature = new ArrayList(); RowResolver old_rr = cppCtx.getOpToParseCtxMap().get(op).getRR(); RowResolver new_rr = new RowResolver(); for (String col : cols) { @@ -397,7 +396,7 @@ public class ColumnPrunerProcFactory { ReduceSinkDesc reduceConf = 
reduce.getConf(); Map oldMap = reduce.getColumnExprMap(); Map newMap = new HashMap(); - Vector sig = new Vector(); + ArrayList sig = new ArrayList(); RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRR(); RowResolver newRR = new RowResolver(); ArrayList originalValueOutputColNames = reduceConf @@ -515,7 +514,7 @@ public class ColumnPrunerProcFactory { RowResolver joinRR = cppCtx.getOpToParseCtxMap().get(op).getRR(); RowResolver newJoinRR = new RowResolver(); ArrayList outputCols = new ArrayList(); - Vector rs = new Vector(); + ArrayList rs = new ArrayList(); Map newColExprMap = new HashMap(); for (int i = 0; i < conf.getOutputColumnNames().size(); i++) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java index 269ac89..a4eaf3e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java @@ -176,8 +176,8 @@ public class GenMRFileSink1 implements NodeProcessor { Operator extract = OperatorFactory.getAndMakeChild(new ExtractDesc( new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, - Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema( - out_rwsch.getColumnInfos())); + Utilities.ReduceField.VALUE.toString(), "", false)), + new RowSchema(out_rwsch.getColumnInfos())); TableDesc ts = (TableDesc) fsConf.getTableInfo().clone(); fsConf diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java index 1d3df7b..2e6180e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java @@ -232,7 +232,7 @@ public class GenMRSkewJoinProcessor { MapredWork clonePlan = null; try { String xmlPlan = currPlan.toXML(); - StringBuffer sb = new StringBuffer(xmlPlan); + StringBuilder sb = new StringBuilder(xmlPlan); ByteArrayInputStream bis; bis = new ByteArrayInputStream(sb.toString().getBytes("UTF-8")); clonePlan = Utilities.deserializeMapRedWork(bis, parseCtx.getConf()); @@ -377,4 +377,4 @@ public class GenMRSkewJoinProcessor { + UNDERLINE + srcTblBigTbl + UNDERLINE + srcTblSmallTbl; } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java index 8d29774..2a00cc1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse; -import java.util.Vector; +import java.util.ArrayList; import org.antlr.runtime.Token; import org.antlr.runtime.tree.CommonTree; @@ -50,12 +50,12 @@ public class ASTNode extends CommonTree implements Node { * * @see org.apache.hadoop.hive.ql.lib.Node#getChildren() */ - public Vector getChildren() { + public ArrayList getChildren() { if (super.getChildCount() == 0) { return null; } - Vector ret_vec = new Vector(); + ArrayList ret_vec = new ArrayList(); for (int i = 0; i < super.getChildCount(); ++i) { ret_vec.add((Node) super.getChild(i)); } @@ -89,11 +89,11 @@ public class ASTNode extends CommonTree implements Node { } public String dump() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append('('); sb.append(toString()); - Vector children = getChildren(); + ArrayList children = 
getChildren(); if (children != null) { for (Node node : getChildren()) { if (node instanceof ASTNode) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java index d7d7ac5..213d446 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java @@ -101,7 +101,7 @@ public class InputSignature { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(getName()); sb.append("("); boolean isfirst = true; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java index 76fc352..5a25d35 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.parse; import java.util.ArrayList; import java.util.HashMap; import java.util.List; -import java.util.Vector; import java.util.Map.Entry; /** @@ -44,10 +43,10 @@ public class QBJoinTree { private final HashMap> rhsSemijoin; // join conditions - private Vector> expressions; + private ArrayList> expressions; // filters - private Vector> filters; + private ArrayList> filters; // user asked for map-side join private boolean mapSideJoin; @@ -101,11 +100,11 @@ public class QBJoinTree { this.leftAliases = leftAliases; } - public Vector> getExpressions() { + public ArrayList> getExpressions() { return expressions; } - public void setExpressions(Vector> expressions) { + public void setExpressions(ArrayList> expressions) { this.expressions = expressions; } @@ -160,7 +159,7 @@ public class QBJoinTree { /** * @return the filters */ - public Vector> getFilters() { + public ArrayList> getFilters() { return filters; } @@ -168,7 +167,7 @@ public class QBJoinTree { * @param filters * the filters to set */ - public void setFilters(Vector> filters) { + public void setFilters(ArrayList> filters) { this.filters = filters; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java index 599da9a..95a7943 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java @@ -22,7 +22,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; -import java.util.Vector; +import java.util.ArrayList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -32,8 +32,7 @@ import org.apache.hadoop.hive.ql.exec.RowSchema; /** * Implementation of the Row Resolver * - **/ - + */ public class RowResolver { private final RowSchema rowSchema; @@ -60,7 +59,7 @@ public class RowResolver { } col_alias = col_alias.toLowerCase(); if (rowSchema.getSignature() == null) { - rowSchema.setSignature(new Vector()); + rowSchema.setSignature(new ArrayList()); } rowSchema.getSignature().add(colInfo); @@ -133,7 +132,7 @@ public class RowResolver { return ret; } - public Vector getColumnInfos() { + public ArrayList getColumnInfos() { return rowSchema.getSignature(); } @@ -176,7 +175,7 @@ public class RowResolver { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (Map.Entry> e : rslvMap .entrySet()) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 624d212..4512a09 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; -import java.util.Vector; import java.util.Map.Entry; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; @@ -864,7 +863,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { @SuppressWarnings("nls") private void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn, - Vector leftAliases, Vector rightAliases, + ArrayList leftAliases, ArrayList rightAliases, ArrayList fields) throws SemanticException { // String[] allAliases = joinTree.getAllAliases(); switch (condn.getToken().getType()) { @@ -956,9 +955,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } } - private void populateAliases(Vector leftAliases, - Vector rightAliases, ASTNode condn, QBJoinTree joinTree, - Vector leftSrc) throws SemanticException { + private void populateAliases(ArrayList leftAliases, + ArrayList rightAliases, ASTNode condn, QBJoinTree joinTree, + ArrayList leftSrc) throws SemanticException { if ((leftAliases.size() != 0) && (rightAliases.size() != 0)) { throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1 .getMsg(condn)); @@ -1000,7 +999,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { * @throws SemanticException */ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, - Vector leftSrc) throws SemanticException { + ArrayList leftSrc) throws SemanticException { if (joinCond == null) { return; } @@ -1017,14 +1016,14 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { case HiveParser.EQUAL: ASTNode leftCondn = (ASTNode) joinCond.getChild(0); - Vector leftCondAl1 = new Vector(); - Vector leftCondAl2 = new Vector(); + ArrayList leftCondAl1 = new ArrayList(); + ArrayList leftCondAl2 = new ArrayList(); parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2, null); ASTNode rightCondn = (ASTNode) joinCond.getChild(1); - Vector rightCondAl1 = new Vector(); - Vector rightCondAl2 = new Vector(); + ArrayList rightCondAl1 = new ArrayList(); + ArrayList rightCondAl2 = new ArrayList(); parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1, rightCondAl2, null); @@ -1068,13 +1067,13 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { // Create all children int childrenBegin = (isFunction ? 
1 : 0); - ArrayList> leftAlias = new ArrayList>( + ArrayList> leftAlias = new ArrayList>( joinCond.getChildCount() - childrenBegin); - ArrayList> rightAlias = new ArrayList>( + ArrayList> rightAlias = new ArrayList>( joinCond.getChildCount() - childrenBegin); for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) { - Vector left = new Vector(); - Vector right = new Vector(); + ArrayList left = new ArrayList(); + ArrayList right = new ArrayList(); leftAlias.add(left); rightAlias.add(right); } @@ -1086,7 +1085,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } boolean leftAliasNull = true; - for (Vector left : leftAlias) { + for (ArrayList left : leftAlias) { if (left.size() != 0) { leftAliasNull = false; break; @@ -1094,7 +1093,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } boolean rightAliasNull = true; - for (Vector right : rightAlias) { + for (ArrayList right : rightAlias) { if (right.size() != 0) { rightAliasNull = false; break; @@ -1432,7 +1431,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { StringBuilder inpColumns = new StringBuilder(); StringBuilder inpColumnTypes = new StringBuilder(); - Vector inputSchema = opParseCtx.get(input).getRR() + ArrayList inputSchema = opParseCtx.get(input).getRR() .getColumnInfos(); for (int i = 0; i < inputSchema.size(); ++i) { if (i != 0) { @@ -3045,7 +3044,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } String cols = new String(); String colTypes = new String(); - Vector colInfos = inputRR.getColumnInfos(); + ArrayList colInfos = inputRR.getColumnInfos(); // CTAS case: the file output format and serde are defined by the create // table command @@ -3137,7 +3136,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { input = genConversionSelectOperator(dest, qb, input, table_desc); inputRR = opParseCtx.get(input).getRR(); - Vector vecCol = new Vector(); + ArrayList vecCol = new ArrayList(); try { StructObjectInspector rowObjectInspector = (StructObjectInspector) table_desc @@ -3184,7 +3183,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { // Check column number List tableFields = oi.getAllStructFieldRefs(); - Vector rowFields = opParseCtx.get(input).getRR() + ArrayList rowFields = opParseCtx.get(input).getRR() .getColumnInfos(); if (tableFields.size() != rowFields.size()) { String reason = "Table " + dest + " has " + tableFields.size() @@ -3313,7 +3312,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { // resulting output object inspector can be used to make the RowResolver // for the UDTF operator RowResolver selectRR = opParseCtx.get(input).getRR(); - Vector inputCols = selectRR.getColumnInfos(); + ArrayList inputCols = selectRR.getColumnInfos(); // Create the object inspector for the input columns and initialize the UDTF ArrayList colNames = new ArrayList(); @@ -3585,7 +3584,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { ArrayList reduceKeys = new ArrayList(); // Compute join keys and store in reduceKeys - Vector exprs = joinTree.getExpressions().get(pos); + ArrayList exprs = joinTree.getExpressions().get(pos); for (int i = 0; i < exprs.size(); i++) { ASTNode expr = exprs.get(i); reduceKeys.add(genExprNodeDesc(expr, inputRS)); @@ -3645,7 +3644,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { Operator joinSrcOp = null; if (leftChild != null) { Operator joinOp = genJoinOperator(qb, leftChild, map); - Vector filter = joinTree.getFilters().get(0); + ArrayList filter = 
joinTree.getFilters().get(0); for (ASTNode cond : filter) { joinOp = genFilterPlan(qb, cond, joinOp); } @@ -3849,7 +3848,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { */ private void pushJoinFilters(QB qb, QBJoinTree joinTree, HashMap map) throws SemanticException { - Vector> filters = joinTree.getFilters(); + ArrayList> filters = joinTree.getFilters(); if (joinTree.getJoinSrc() != null) { pushJoinFilters(qb, joinTree.getJoinSrc(), map); } @@ -3858,7 +3857,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { for (String src : joinTree.getBaseSrc()) { if (src != null) { Operator srcOp = map.get(src); - Vector filter = filters.get(pos); + ArrayList filter = filters.get(pos); for (ASTNode cond : filter) { srcOp = genFilterPlan(qb, cond, srcOp); } @@ -3894,14 +3893,14 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { QBJoinTree joinTree = new QBJoinTree(); joinTree.setNoOuterJoin(false); - joinTree.setExpressions(new Vector>()); - joinTree.setFilters(new Vector>()); + joinTree.setExpressions(new ArrayList>()); + joinTree.setFilters(new ArrayList>()); // Create joinTree structures to fill them up later - Vector rightAliases = new Vector(); - Vector leftAliases = new Vector(); - Vector baseSrc = new Vector(); - Vector preserved = new Vector(); + ArrayList rightAliases = new ArrayList(); + ArrayList leftAliases = new ArrayList(); + ArrayList baseSrc = new ArrayList(); + ArrayList preserved = new ArrayList(); boolean lastPreserved = false; int cols = -1; @@ -3940,8 +3939,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { + "number of keys in UNIQUEJOIN"); } - Vector expressions = new Vector(); - Vector filt = new Vector(); + ArrayList expressions = new ArrayList(); + ArrayList filt = new ArrayList(); for (Node exp : child.getChildren()) { expressions.add((ASTNode) exp); @@ -4064,18 +4063,18 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { assert false; } - Vector> expressions = new Vector>(); - expressions.add(new Vector()); - expressions.add(new Vector()); + ArrayList> expressions = new ArrayList>(); + expressions.add(new ArrayList()); + expressions.add(new ArrayList()); joinTree.setExpressions(expressions); - Vector> filters = new Vector>(); - filters.add(new Vector()); - filters.add(new Vector()); + ArrayList> filters = new ArrayList>(); + filters.add(new ArrayList()); + filters.add(new ArrayList()); joinTree.setFilters(filters); ASTNode joinCond = (ASTNode) joinParseTree.getChild(2); - Vector leftSrc = new Vector(); + ArrayList leftSrc = new ArrayList(); parseJoinCondition(joinTree, joinCond, leftSrc); if (leftSrc.size() == 1) { joinTree.setLeftAlias(leftSrc.get(0)); @@ -4163,18 +4162,18 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } target.setBaseSrc(baseSrc); - Vector> expr = target.getExpressions(); + ArrayList> expr = target.getExpressions(); for (int i = 0; i < nodeRightAliases.length; i++) { expr.add(node.getExpressions().get(i + 1)); } - Vector> filter = target.getFilters(); + ArrayList> filter = target.getFilters(); for (int i = 0; i < nodeRightAliases.length; i++) { filter.add(node.getFilters().get(i + 1)); } if (node.getFilters().get(0).size() != 0) { - Vector filterPos = filter.get(pos); + ArrayList filterPos = filter.get(pos); filterPos.addAll(node.getFilters().get(0)); } @@ -4238,8 +4237,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { return -1; } - Vector nodeCondn = node.getExpressions().get(0); - Vector targetCondn = null; + ArrayList nodeCondn = 
node.getExpressions().get(0); + ArrayList targetCondn = null; if (leftAlias.equals(target.getLeftAlias())) { targetCondn = target.getExpressions().get(0); @@ -4307,7 +4306,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { throws SemanticException { OpParseContext inputCtx = opParseCtx.get(input); RowResolver inputRR = inputCtx.getRR(); - Vector columns = inputRR.getColumnInfos(); + ArrayList columns = inputRR.getColumnInfos(); ArrayList colList = new ArrayList(); ArrayList columnNames = new ArrayList(); for (int i = 0; i < columns.size(); i++) { @@ -5147,8 +5146,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { */ private void LVmergeRowResolvers(RowResolver source, RowResolver dest, ArrayList outputInternalColNames) { - Vector cols = source.getColumnInfos(); - for (ColumnInfo c : cols) { + for (ColumnInfo c : source.getColumnInfos()) { String internalName = getColumnInternalName(outputInternalColNames.size()); outputInternalColNames.add(internalName); ColumnInfo newCol = new ColumnInfo(internalName, c.getType(), c diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java index 2b5732f..3aebd3f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java @@ -108,4 +108,4 @@ public class AddPartitionDesc { this.partSpec = partSpec; } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index ed6af76..f861065 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -25,7 +25,6 @@ import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Properties; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -338,7 +337,7 @@ public class PlanUtils { */ public static List getFieldSchemasFromRowSchema(RowSchema row, String fieldPrefix) { - Vector c = row.getSignature(); + ArrayList c = row.getSignature(); return getFieldSchemasFromColumnInfo(c, fieldPrefix); } @@ -346,7 +345,7 @@ public class PlanUtils { * Convert the ColumnInfo to FieldSchema. 
*/ public static List getFieldSchemasFromColumnInfo( - Vector cols, String fieldPrefix) { + ArrayList cols, String fieldPrefix) { if ((cols == null) || (cols.size() == 0)) { return new ArrayList(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java index 704d2c5..1b1a525 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConv.java @@ -190,4 +190,4 @@ public class UDFConv extends UDF { result.set(value, first, value.length - first); return result; } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashBucket.java ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashBucket.java index 723835b..c7d4c84 100644 --- ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashBucket.java +++ ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/htree/HashBucket.java @@ -287,7 +287,7 @@ final class HashBucket extends HashNode implements Externalizable { @Override public String toString() { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); buf.append("HashBucket {depth="); buf.append(_depth); buf.append(", keys="); diff --git ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java index c7ce77d..359de6c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java @@ -224,7 +224,7 @@ public class QTestUtil { FileInputStream fis = new FileInputStream(qf); BufferedInputStream bis = new BufferedInputStream(fis); DataInputStream dis = new DataInputStream(bis); - StringBuffer qsb = new StringBuffer(); + StringBuilder qsb = new StringBuilder(); // Read the entire query while (dis.available() != 0) { diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java index 54e3ee1..9e75c51 100644 --- ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java +++ ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java @@ -36,4 +36,4 @@ public class UDFTestLength extends UDF { result.set(s.toString().length()); return result; } -} \ No newline at end of file +} diff --git serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java index e4ca344..ab1e7ab 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java +++ serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java @@ -185,7 +185,7 @@ public class BytesRefWritable implements Writable, Comparable /** {@inheritDoc} */ @Override public String toString() { - StringBuffer sb = new StringBuffer(3 * length); + StringBuilder sb = new StringBuilder(3 * length); for (int idx = start; idx < length; idx++) { // if not the first, put a blank separator in if (idx != 0) { @@ -245,4 +245,4 @@ public class BytesRefWritable implements Writable, Comparable public int getStart() { return start; } -} \ No newline at end of file +} diff --git serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java index 76ef207..e674a4f 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java +++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeFieldList.java @@ -275,7 +275,7 @@ public class 
DynamicSerDeFieldList extends DynamicSerDeSimpleNode implements @Override public String toString() { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); String prefix = ""; for (DynamicSerDeField t : getChildren()) { result.append(prefix + t.fieldid + ":" diff --git serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java index 923ed40..323e258 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java +++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java @@ -90,7 +90,7 @@ public class ParseException extends Exception { if (!specialConstructor) { return super.getMessage(); } - StringBuffer expected = new StringBuffer(); + StringBuilder expected = new StringBuilder(); int maxSize = 0; for (int[] expectedTokenSequence : expectedTokenSequences) { if (maxSize < expectedTokenSequence.length) { @@ -139,7 +139,7 @@ public class ParseException extends Exception { * version cannot be used as part of an ASCII string literal. */ protected String add_escapes(String str) { - StringBuffer retval = new StringBuffer(); + StringBuilder retval = new StringBuilder(); char ch; for (int i = 0; i < str.length(); i++) { switch (str.charAt(i)) { diff --git serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java index e370a03..5a66c08 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java +++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/TokenMgrError.java @@ -37,7 +37,7 @@ public class TokenMgrError extends Error { * equivalents in the given string */ protected static final String addEscapes(String str) { - StringBuffer retval = new StringBuffer(); + StringBuilder retval = new StringBuilder(); char ch; for (int i = 0; i < str.length(); i++) { switch (str.charAt(i)) { diff --git serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java index 29c8c38..6d8449f 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java +++ serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java @@ -331,4 +331,4 @@ public class LazyBinaryMap extends } return mapSize; } -} \ No newline at end of file +} diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java index aa02dc0..f86f50b 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java +++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java @@ -298,7 +298,7 @@ public class ObjectInspectorUtils { + getObjectInspectorName(moi.getMapValueObjectInspector()) + ">"; } case STRUCT: { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); result.append(oi.getClass().getSimpleName() + "<"); StructObjectInspector soi = (StructObjectInspector) oi; List fields = soi.getAllStructFieldRefs(); diff --git serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java index 354dcd1..897a6b8 100644 --- serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java +++ 
serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java @@ -247,7 +247,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements } public String nextToken() throws EOFException { - StringBuffer ret = null; + StringBuilder ret = null; boolean done = false; if (tokenizer == null) { @@ -267,7 +267,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements if (nextToken.equals(separator)) { done = true; } else if (ret == null) { - ret = new StringBuffer(nextToken); + ret = new StringBuilder(nextToken); } else { ret.append(nextToken); } diff --git service/src/java/org/apache/hadoop/hive/service/HiveServer.java service/src/java/org/apache/hadoop/hive/service/HiveServer.java index 00fefcf..6705d29 100644 --- service/src/java/org/apache/hadoop/hive/service/HiveServer.java +++ service/src/java/org/apache/hadoop/hive/service/HiveServer.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.service; import java.io.IOException; +import java.util.ArrayList; import java.util.List; -import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -229,7 +229,7 @@ public class HiveServer extends ThriftHive { return ""; } - Vector result = new Vector(); + ArrayList result = new ArrayList(); driver.setMaxRows(1); try { if (driver.getResults(result)) { @@ -267,10 +267,10 @@ public class HiveServer extends ThriftHive { } if (!isHiveQuery) { // Return no results if the last command was not a Hive query - return new Vector(); + return new ArrayList(); } - Vector result = new Vector(); + ArrayList result = new ArrayList(); driver.setMaxRows(numRows); try { driver.getResults(result); @@ -295,11 +295,11 @@ public class HiveServer extends ThriftHive { public List fetchAll() throws HiveServerException, TException { if (!isHiveQuery) { // Return no results if the last command was not a Hive query - return new Vector(); + return new ArrayList(); } - Vector rows = new Vector(); - Vector result = new Vector(); + ArrayList rows = new ArrayList(); + ArrayList result = new ArrayList(); try { while (driver.getResults(result)) { rows.addAll(result);
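Note: the recurring change throughout this patch is mechanical — the synchronized legacy classes java.util.Vector and java.lang.StringBuffer are swapped for their unsynchronized counterparts java.util.ArrayList and java.lang.StringBuilder in code paths that are only ever touched by one thread, so the per-call locking buys nothing. The following is a minimal standalone sketch of the before/after pattern only; MigrationSketch, fetchResults and buildPartName are illustrative names invented for this example, not classes or methods from the Hive tree.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class MigrationSketch {

  // Before: Vector<String> res = new Vector<String>();  -- every add() acquires a lock.
  // After: an unsynchronized ArrayList is enough because the buffer is filled and
  // drained from a single thread, mirroring the Driver.getResults()-style usage above.
  public static List<String> fetchResults(List<String> source, int maxRows) {
    ArrayList<String> res = new ArrayList<String>();
    for (int i = 0; i < source.size() && i < maxRows; i++) {
      res.add(source.get(i));
    }
    return res;
  }

  // Before: StringBuffer suffixBuf = new StringBuffer();
  // StringBuilder exposes the same append() API without synchronization on each call.
  public static String buildPartName(Map<String, String> spec) {
    StringBuilder suffixBuf = new StringBuilder();
    boolean first = true;
    for (Map.Entry<String, String> e : spec.entrySet()) {
      if (!first) {
        suffixBuf.append('/');
      }
      suffixBuf.append(e.getKey()).append('=').append(e.getValue());
      first = false;
    }
    return suffixBuf.toString();
  }

  public static void main(String[] args) {
    System.out.println(fetchResults(Arrays.asList("a", "b", "c"), 2));
    System.out.println(buildPartName(Collections.singletonMap("ds", "2009-01-01")));
  }
}

Because ArrayList implements List and StringBuilder implements the same CharSequence/Appendable contracts as StringBuffer, the substitution is source-compatible wherever the variable is confined to one thread; the only signature changes in the patch are on methods that previously exposed Vector directly (getResults, fetch, getChildren, getResultBucket, getSignature, and the join-tree accessors).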