diff --git a/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java b/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
index 0a451ae..31f37e7 100644
--- a/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
+++ b/hcatalog/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
@@ -105,7 +105,8 @@ protected void map(WritableComparable key, HCatRecord value,
     protected void reduce(IntWritable key, java.lang.Iterable values,
         org.apache.hadoop.mapreduce.Reducer.Context context)
         throws IOException, InterruptedException {
-      String output = key.toString() + TAB;
+      StringBuffer output = new StringBuffer(key.toString());
+      output.append(TAB);
       Long sumid = 0l;
       Long sumintnum5 = 0l;
       Long sumintnum100 = 0l;
@@ -122,14 +123,21 @@ protected void reduce(IntWritable key, java.lang.Iterable
         sumfloatnum += value.floatnum.get();
         sumdoublenum += value.doublenum.get();
       }
-      output += sumid + TAB;
-      output += sumintnum5 + TAB;
-      output += sumintnum100 + TAB;
-      output += sumintnum + TAB;
-      output += sumlongnum + TAB;
-      output += sumfloatnum + TAB;
-      output += sumdoublenum + TAB;
-      context.write(dummyLong, new Text(output));
+      output.append(sumid);
+      output.append(TAB);
+      output.append(sumintnum5);
+      output.append(TAB);
+      output.append(sumintnum100);
+      output.append(TAB);
+      output.append(sumintnum);
+      output.append(TAB);
+      output.append(sumlongnum);
+      output.append(TAB);
+      output.append(sumfloatnum);
+      output.append(TAB);
+      output.append(sumdoublenum);
+      output.append(TAB);
+      context.write(dummyLong, new Text(output.toString()));
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index cd872b2..834ca95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -324,10 +324,14 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException {
         }
       }
     }
-
-    String report = " " + getId() + " map = " + mapProgress + "%, reduce = " + reduceProgress
-        + "%";
-
+    StringBuffer report = new StringBuffer(" ");
+    report.append(getId());
+    report.append(" map = ");
+    report.append(mapProgress);
+    report.append("%, reduce = ");
+    report.append(reduceProgress);
+    report.append("%");
-    if (!report.equals(lastReport)
+    if (!report.toString().equals(lastReport)
         || System.currentTimeMillis() >= reportTime + maxReportInterval) {
@@ -341,15 +345,16 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException {
         long newCpuMSec = counterCpuMsec.getValue();
         if (newCpuMSec > 0) {
           cpuMsec = newCpuMSec;
-          report += ", Cumulative CPU "
-            + (cpuMsec / 1000D) + " sec";
+          report.append(", Cumulative CPU ");
+          report.append(cpuMsec / 1000D);
+          report.append(" sec");
         }
       }
 
       // write out serialized plan with counters to log file
       // LOG.info(queryPlan);
-      String output = dateFormat.format(Calendar.getInstance().getTime()) + report;
+      String output = dateFormat.format(Calendar.getInstance().getTime()) + report.toString();
       SessionState ss = SessionState.get();
       if (ss != null) {
         ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs);
@@ -361,7 +366,7 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException {
         }
       }
       console.printInfo(output);
-      lastReport = report;
+      lastReport = report.toString();
       reportTime = System.currentTimeMillis();
     }
   }
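Note on the HadoopJobExecHelper hunk above: StringBuffer does not override Object.equals, so once report becomes a StringBuffer, report.equals(lastReport) is a reference comparison that is always false. That is why the condition goes through toString(). Also note the original built " " + getId() + " map = " with exactly one space before "map"; appending a separate " " would change the printed report. A minimal standalone sketch of the equals pitfall (class name and values are illustrative, not from the Hive tree):

    public class BufferEqualsDemo {
      public static void main(String[] args) {
        StringBuffer report = new StringBuffer(" Stage-1 map = 100%, reduce = 100%");
        String lastReport = " Stage-1 map = 100%, reduce = 100%";

        // Inherited Object.equals: compares references, never contents.
        System.out.println(report.equals(lastReport));            // false
        // Content comparison via an explicit String snapshot.
        System.out.println(report.toString().equals(lastReport)); // true
      }
    }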
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
index 76cd848..63036a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
@@ -183,17 +183,21 @@ else if (pathNames.length == 2) {
       return pathNames[0] + "@" + pathNames[1];
     }
 
-    String ret = pathNames[0] + "@" + pathNames[1] + "@";
+    StringBuffer ret = new StringBuffer();
+    ret.append(pathNames[0]);
+    ret.append("@");
+    ret.append(pathNames[1]);
+    ret.append("@");
     boolean first = true;
     for (int i = 2; i < pathNames.length; i++) {
       if (!first) {
-        ret = ret + "/";
+        ret.append("/");
       } else {
         first = false;
       }
-      ret = ret + pathNames[i];
+      ret.append(pathNames[i]);
     }
-    return ret;
+    return ret.toString();
   }
 
   public HiveLockObjectData getData() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
index b7b48d4..375ca18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
@@ -166,13 +166,15 @@ private static String getLastObjectName(String parent, HiveLockObject key) {
    **/
   private List getObjectNames(HiveLockObject key) {
     List parents = new ArrayList();
-    String curParent = "/" + parent + "/";
+    StringBuffer curParent = new StringBuffer("/");
+    curParent.append(parent);
+    curParent.append("/");
     String[] names = key.getName().split("/");
 
     for (String name : names) {
-      curParent = curParent + name;
-      parents.add(curParent);
-      curParent = curParent + "/";
+      curParent.append(name);
+      parents.add(curParent.toString());
+      curParent.append("/");
     }
     return parents;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 2eac129..5847748 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -171,7 +171,7 @@ private void handlePartialScanCommand(TableScanOperator op, GenMRProcContext ctx
       ParseContext parseCtx, Task currTask, QBParseInfo parseInfo,
       StatsWork statsWork, Task statsTask)
       throws SemanticException {
-    String aggregationKey = op.getConf().getStatsAggPrefix();
+    StringBuffer aggregationKey = new StringBuffer(op.getConf().getStatsAggPrefix());
     List inputPaths = new ArrayList();
     switch (parseInfo.getTableSpec().specType) {
       case TABLE_ONLY:
@@ -180,7 +180,7 @@ private void handlePartialScanCommand(TableScanOperator op, GenMRProcContext ctx
       case STATIC_PARTITION:
         Partition part = parseInfo.getTableSpec().partHandle;
         try {
-          aggregationKey += Warehouse.makePartPath(part.getSpec());
+          aggregationKey.append(Warehouse.makePartPath(part.getSpec()));
         } catch (MetaException e) {
           throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AGGKEY.getMsg(
             part.getPartitionPath().toString() + e.getMessage()));
@@ -194,7 +194,7 @@ private void handlePartialScanCommand(TableScanOperator op, GenMRProcContext ctx
     // scan work
     PartialScanWork scanWork = new PartialScanWork(inputPaths);
     scanWork.setMapperCannotSpanPartns(true);
-    scanWork.setAggKey(aggregationKey);
+    scanWork.setAggKey(aggregationKey.toString());
 
     // stats work
     statsWork.setPartialScanAnalyzeCommand(true);
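Why every file in this patch follows the same shape: each String += copies the entire accumulated value, so a loop that concatenates n pieces copies O(n^2) characters, while a single buffer appends in amortized O(n) and takes snapshots with toString() only where a String is needed. A before/after sketch loosely modeled on getObjectNames above; the class, method names, and inputs are illustrative, not Hive APIs:

    import java.util.ArrayList;
    import java.util.List;

    public class AppendPatternDemo {
      // Before: each '+' allocates a new String and copies the old contents.
      static List<String> parentsConcat(String parent, String[] names) {
        List<String> parents = new ArrayList<String>();
        String cur = "/" + parent + "/";
        for (String name : names) {
          cur = cur + name;            // full copy of cur on every iteration
          parents.add(cur);
          cur = cur + "/";             // and again
        }
        return parents;
      }

      // After: one buffer; toString() produces the snapshots stored in the list.
      static List<String> parentsBuffer(String parent, String[] names) {
        List<String> parents = new ArrayList<String>();
        StringBuffer cur = new StringBuffer("/").append(parent).append("/");
        for (String name : names) {
          cur.append(name);
          parents.add(cur.toString()); // snapshot of the current prefix
          cur.append("/");
        }
        return parents;
      }

      public static void main(String[] args) {
        String[] names = "default/tbl/ds=1".split("/");
        // Both print the same three cumulative paths.
        System.out.println(parentsConcat("hive_zookeeper_namespace", names));
        System.out.println(parentsBuffer("hive_zookeeper_namespace", names));
      }
    }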
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 9b839f5..fd59225 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -408,9 +408,10 @@ private static ReadEntity getParentViewInfo(String alias_id,
     // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
     // -> implies depends on.
     // T's parent would be V1
+    ReadEntity input;
     for (int pos = 0; pos < aliases.length; pos++) {
       currentAlias = currentAlias == null ? aliases[pos] : currentAlias + ":" + aliases[pos];
-      ReadEntity input = viewAliasToInput.get(currentAlias);
+      input = viewAliasToInput.get(currentAlias);
       if (input == null) {
         return currentInput;
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
index cd1b4ad..81eca5f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
@@ -481,18 +481,20 @@ public static MapJoinOperator convertMapJoin(
       valueTableDescs.add(valueTableDesc);
       valueFiltedTableDescs.add(valueFilteredTableDesc);
     }
-    String dumpFilePrefix = "";
+    StringBuffer dumpFilePrefix = new StringBuffer();
     if( joinTree.getMapAliases() != null ) {
       for(String mapAlias : joinTree.getMapAliases()) {
-        dumpFilePrefix = dumpFilePrefix + mapAlias;
+        dumpFilePrefix.append(mapAlias);
       }
-      dumpFilePrefix = dumpFilePrefix+"-"+PlanUtils.getCountForMapJoinDumpFilePrefix();
+      dumpFilePrefix.append("-");
+      dumpFilePrefix.append(PlanUtils.getCountForMapJoinDumpFilePrefix());
     } else {
-      dumpFilePrefix = "mapfile"+PlanUtils.getCountForMapJoinDumpFilePrefix();
+      dumpFilePrefix.append("mapfile");
+      dumpFilePrefix.append(PlanUtils.getCountForMapJoinDumpFilePrefix());
     }
     MapJoinDesc mapJoinDescriptor = new MapJoinDesc(keyExprMap, keyTableDesc, valueExprMap,
       valueTableDescs, valueFiltedTableDescs, outputColumnNames, mapJoinPos, joinCondns,
-      filters, op.getConf().getNoOuterJoin(), dumpFilePrefix);
+      filters, op.getConf().getNoOuterJoin(), dumpFilePrefix.toString());
     mapJoinDescriptor.setTagOrder(tagOrder);
     mapJoinDescriptor.setNullSafes(desc.getNullSafes());
     mapJoinDescriptor.setFilterMap(desc.getFilterMap());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
index ee4d4d1..631c04e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
@@ -171,36 +171,38 @@ public static void processSkewJoin(JoinOperator joinOp,
     for (int i = 0; i < numAliases; i++) {
       Byte alias = tags[i];
       List valueCols = joinValues.get(alias);
-      String colNames = "";
-      String colTypes = "";
+      StringBuffer colNames = new StringBuffer();
+      StringBuffer colTypes = new StringBuffer();
       int columnSize = valueCols.size();
       List newValueExpr = new ArrayList();
       List newKeyExpr = new ArrayList();
       boolean first = true;
+      TypeInfo type;
+      String newColName;
       for (int k = 0; k < columnSize; k++) {
-        TypeInfo type = valueCols.get(k).getTypeInfo();
-        String newColName = i + "_VALUE_" + k; // any name, it does not matter.
+        type = valueCols.get(k).getTypeInfo();
+        newColName = i + "_VALUE_" + k; // any name, it does not matter.
         newValueExpr
           .add(new ExprNodeColumnDesc(type, newColName, "" + i, false));
         if (!first) {
-          colNames = colNames + ",";
-          colTypes = colTypes + ",";
+          colNames.append(",");
+          colTypes.append(",");
         }
         first = false;
-        colNames = colNames + newColName;
-        colTypes = colTypes + valueCols.get(k).getTypeString();
+        colNames.append(newColName);
+        colTypes.append(valueCols.get(k).getTypeString());
       }
 
       // we are putting join keys at last part of the spilled table
       for (int k = 0; k < joinKeys.size(); k++) {
         if (!first) {
-          colNames = colNames + ",";
-          colTypes = colTypes + ",";
+          colNames.append(",");
+          colTypes.append(",");
         }
         first = false;
-        colNames = colNames + joinKeys.get(k);
-        colTypes = colTypes + joinKeyTypes.get(k);
+        colNames.append(joinKeys.get(k));
+        colTypes.append(joinKeyTypes.get(k));
         newKeyExpr.add(new ExprNodeColumnDesc(TypeInfoFactory
           .getPrimitiveTypeInfo(joinKeyTypes.get(k)), joinKeys.get(k),
           "" + i, false));
@@ -208,32 +210,33 @@ public static void processSkewJoin(JoinOperator joinOp,
       newJoinValues.put(alias, newValueExpr);
       newJoinKeys.put(alias, newKeyExpr);
-      tableDescList.put(alias, Utilities.getTableDesc(colNames, colTypes));
+      tableDescList.put(alias, Utilities.getTableDesc(colNames.toString(), colTypes.toString()));
 
       // construct value table Desc
-      String valueColNames = "";
-      String valueColTypes = "";
+      StringBuffer valueColNames = new StringBuffer();
+      StringBuffer valueColTypes = new StringBuffer();
       first = true;
       for (int k = 0; k < columnSize; k++) {
-        String newColName = i + "_VALUE_" + k; // any name, it does not matter.
+        newColName = i + "_VALUE_" + k; // any name, it does not matter.
         if (!first) {
-          valueColNames = valueColNames + ",";
-          valueColTypes = valueColTypes + ",";
+          valueColNames.append(",");
+          valueColTypes.append(",");
         }
-        valueColNames = valueColNames + newColName;
-        valueColTypes = valueColTypes + valueCols.get(k).getTypeString();
+        valueColNames.append(newColName);
+        valueColTypes.append(valueCols.get(k).getTypeString());
         first = false;
       }
       newJoinValueTblDesc.set(Byte.valueOf((byte) i), Utilities.getTableDesc(
-        valueColNames, valueColTypes));
+        valueColNames.toString(), valueColTypes.toString()));
     }
 
     joinDescriptor.setSkewKeysValuesTables(tableDescList);
     joinDescriptor.setKeyTableDesc(keyTblDesc);
 
+    MapWork newPlan;
     for (int i = 0; i < numAliases - 1; i++) {
       Byte src = tags[i];
-      MapWork newPlan = PlanUtils.getMapRedWork().getMapWork();
+      newPlan = PlanUtils.getMapRedWork().getMapWork();
 
       // This code has been only added for testing
       boolean mapperCannotSpanPartns =
@@ -253,8 +256,9 @@ public static void processSkewJoin(JoinOperator joinOp,
       }
 
       Operator[] parentOps = new TableScanOperator[tags.length];
+      Operator ts;
       for (int k = 0; k < tags.length; k++) {
-        Operator ts = OperatorFactory.get(
+        ts = OperatorFactory.get(
           TableScanDesc.class, (RowSchema) null);
         ((TableScanOperator)ts).setTableDesc(tableDescList.get((byte)k));
         parentOps[k] = ts;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
index c0a8116..94ac4a9 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
@@ -106,7 +106,7 @@ private void parseSimplePattern(String likePattern) {
     int beginIndex = 0;
     int endIndex = length;
     char lastChar = 'a';
-    String strPattern = new String();
+    StringBuffer strPattern = new StringBuffer();
     type = PatternType.NONE;
 
     for (int i = 0; i < length; i++) {
@@ -116,7 +116,7 @@ private void parseSimplePattern(String likePattern) {
           type = PatternType.COMPLEX;
           return;
         } else { // such as "abc\%de%"
-          strPattern += likePattern.substring(beginIndex, i - 1);
+          strPattern.append(likePattern.substring(beginIndex, i - 1));
           beginIndex = i;
         }
       } else if (n == '%') {
@@ -128,7 +128,7 @@ private void parseSimplePattern(String likePattern) {
           type = PatternType.COMPLEX;
           return;
         } else { // such as "abc\%de%"
-          strPattern += likePattern.substring(beginIndex, i - 1);
+          strPattern.append(likePattern.substring(beginIndex, i - 1));
           beginIndex = i;
         }
       } else {
@@ -140,7 +140,7 @@ private void parseSimplePattern(String likePattern) {
             type = PatternType.BEGIN; // such as "abc%"
           }
         } else { // such as "abc\%"
-          strPattern += likePattern.substring(beginIndex, i - 1);
+          strPattern.append(likePattern.substring(beginIndex, i - 1));
           beginIndex = i;
           endIndex = length;
         }
@@ -149,8 +149,8 @@ private void parseSimplePattern(String likePattern) {
       lastChar = n;
     }
 
-    strPattern += likePattern.substring(beginIndex, endIndex);
-    simplePattern.set(strPattern);
+    strPattern.append(likePattern.substring(beginIndex, endIndex));
+    simplePattern.set(strPattern.toString());
   }
 
   private static boolean find(Text s, Text sub, int startS, int endS) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java
index c24c0ec..b56e6f2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSentences.java
@@ -128,14 +128,16 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length >= 1 && children.length <= 3);
-    String display = "sentences(" + children[0];
+    StringBuffer display = new StringBuffer("sentences(" + children[0]);
     if(children.length > 1) {
-      display += ", " + children[1];
+      display.append(", ");
+      display.append(children[1]);
       if(children.length > 2) {
-        display += ", " + children[2];
+        display.append(", ");
+        display.append(children[2]);
       }
     }
-    display += ")";
-    return display;
+    display.append(")");
+    return display.toString();
   }
 }
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
index 8683496..f1ab45d 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
@@ -160,9 +160,10 @@ public void process(WatchedEvent event) {
   public static String ensurePath(ZooKeeper zk, String path, List acl)
       throws KeeperException, InterruptedException {
     String[] pathComps = StringUtils.splitByWholeSeparator(path, "/");
-    String currentPath = "";
+    StringBuffer currentPath = new StringBuffer();
     for (String pathComp : pathComps) {
-      currentPath += "/" + pathComp;
+      currentPath.append("/");
+      currentPath.append(pathComp);
       try {
-        String node = zk.create(currentPath, new byte[0], acl,
+        String node = zk.create(currentPath.toString(), new byte[0], acl,
             CreateMode.PERSISTENT);
@@ -170,7 +171,7 @@ public static String ensurePath(ZooKeeper zk, String path, List acl) throws
       } catch (KeeperException.NodeExistsException e) {
       }
     }
-    return currentPath;
+    return currentPath.toString();
  }
 
  /**
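Two closing notes on the ZooKeeperTokenStore hunk: ZooKeeper.create takes a String path, so currentPath must be snapshotted with toString() at the call site as well, not only at the return. More generally, mechanical String-to-buffer rewrites are easy to get subtly wrong (e.g. the colTypes buffer in GenMRSkewJoinProcessor must append joinKeyTypes, not joinKeys), and a cheap guard is to run the old and new builders side by side on a few inputs. A hedged sketch, loosely modeled on getDisplayString above; buildOld and buildNew are stand-ins, not Hive methods:

    public class RefactorCheck {
      // Old style: repeated concatenation.
      static String buildOld(String[] children) {
        String display = "sentences(" + children[0];
        for (int i = 1; i < children.length; i++) {
          display += ", " + children[i];
        }
        display += ")";
        return display;
      }

      // New style: single buffer, snapshot at the end.
      static String buildNew(String[] children) {
        StringBuffer display = new StringBuffer("sentences(").append(children[0]);
        for (int i = 1; i < children.length; i++) {
          display.append(", ").append(children[i]);
        }
        display.append(")");
        return display.toString();
      }

      public static void main(String[] args) {
        String[][] cases = { {"col"}, {"col", "'en'"}, {"col", "'en'", "'US'"} };
        for (String[] c : cases) {
          String a = buildOld(c);
          String b = buildNew(c);
          if (!a.equals(b)) {
            throw new AssertionError(a + " != " + b);
          }
          System.out.println("ok: " + a);
        }
      }
    }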