diff --git a/beeline/src/java/org/apache/hive/beeline/SQLCompletor.java b/beeline/src/java/org/apache/hive/beeline/SQLCompletor.java
index 916c7fc..05b5d51 100644
--- a/beeline/src/java/org/apache/hive/beeline/SQLCompletor.java
+++ b/beeline/src/java/org/apache/hive/beeline/SQLCompletor.java
@@ -43,36 +43,42 @@ public SQLCompletor(BeeLine beeLine, boolean skipmeta)
     Set completions = new TreeSet();
 
     // add the default SQL completions
-    String keywords = new BufferedReader(new InputStreamReader(
+    StringBuilder keywords = new StringBuilder(new BufferedReader(new InputStreamReader(
         SQLCompletor.class.getResourceAsStream(
-            "sql-keywords.properties"))).readLine();
+            "sql-keywords.properties"))).readLine());
 
     // now add the keywords from the current connection
     try {
-      keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getSQLKeywords();
+      keywords.append(",");
+      keywords.append(beeLine.getDatabaseConnection().getDatabaseMetaData().getSQLKeywords());
     } catch (Throwable t) {
     }
     try {
-      keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getStringFunctions();
+      keywords.append(",");
+      keywords.append(beeLine.getDatabaseConnection().getDatabaseMetaData().getStringFunctions());
     } catch (Throwable t) {
     }
     try {
-      keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getNumericFunctions();
+      keywords.append(",");
+      keywords.append(beeLine.getDatabaseConnection().getDatabaseMetaData().getNumericFunctions());
     } catch (Throwable t) {
    }
     try {
-      keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getSystemFunctions();
+      keywords.append(",");
+      keywords.append(beeLine.getDatabaseConnection().getDatabaseMetaData().getSystemFunctions());
     } catch (Throwable t) {
     }
     try {
-      keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getTimeDateFunctions();
+      keywords.append(",");
+      keywords.append(beeLine.getDatabaseConnection().getDatabaseMetaData().getTimeDateFunctions());
     } catch (Throwable t) {
     }
 
     // also allow lower-case versions of all the keywords
-    keywords += "," + keywords.toLowerCase();
+    String lowerCased = keywords.toString().toLowerCase();
+    keywords.append(",");
+    keywords.append(lowerCased);
 
-    for (StringTokenizer tok = new StringTokenizer(keywords, ", "); tok.hasMoreTokens(); completions
+    for (StringTokenizer tok = new StringTokenizer(keywords.toString(), ", "); tok.hasMoreTokens(); completions
        .add(tok.nextToken())) {
       ;
     }
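Note on the last hunk above: with `keywords += "," + keywords.toLowerCase()`, the right-hand side is evaluated against the old value before the assignment, so a naive builder translation that appends the comma first ends up lower-casing the comma it just appended. The hunk therefore snapshots the lower-cased copy before appending. A minimal, self-contained sketch of the difference (class name and values are illustrative):

    public class SnapshotDemo {
      public static void main(String[] args) {
        String s = "FOO,BAR";
        s += "," + s.toLowerCase();                   // "FOO,BAR,foo,bar"

        // Naive translation: toString() runs after the comma is appended,
        // so the lower-cased copy carries a stray trailing comma.
        StringBuilder naive = new StringBuilder("FOO,BAR");
        naive.append(",");
        naive.append(naive.toString().toLowerCase()); // "FOO,BAR,foo,bar,"

        // Faithful translation: snapshot first, then append.
        StringBuilder fixed = new StringBuilder("FOO,BAR");
        String lower = fixed.toString().toLowerCase();
        fixed.append(",").append(lower);              // "FOO,BAR,foo,bar"

        System.out.println(s + "\n" + naive + "\n" + fixed);
      }
    }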
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index aeced48..f1c0363 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -397,24 +397,25 @@ public void handle(Signal signal) {
     try {
       int lastRet = 0, ret = 0;
 
-      String command = "";
+      StringBuilder command = new StringBuilder();
       for (String oneCmd : line.split(";")) {
 
         if (StringUtils.endsWith(oneCmd, "\\")) {
-          command += StringUtils.chop(oneCmd) + ";";
+          command.append(StringUtils.chop(oneCmd));
+          command.append(";");
           continue;
         } else {
-          command += oneCmd;
+          command.append(oneCmd);
         }
-        if (StringUtils.isBlank(command)) {
+        if (StringUtils.isBlank(command.toString())) {
           continue;
         }
 
-        ret = processCmd(command);
+        ret = processCmd(command.toString());
         //wipe cli query state
         SessionState ss = SessionState.get();
         ss.setCommandType(null);
-        command = "";
+        command.setLength(0);
         lastRet = ret;
         boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
         if (ret != 0 && !ignoreErrors) {
@@ -767,24 +768,24 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor o
 
     int ret = 0;
 
-    String prefix = "";
+    StringBuilder prefix = new StringBuilder();
     String curDB = getFormattedDb(conf, ss);
     String curPrompt = prompt + curDB;
     String dbSpaces = spacesForString(curDB);
 
     while ((line = reader.readLine(curPrompt + "> ")) != null) {
-      if (!prefix.equals("")) {
-        prefix += '\n';
+      if (prefix.length() > 0) {
+        prefix.append('\n');
       }
       if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
-        line = prefix + line;
+        line = prefix.toString() + line;
         ret = cli.processLine(line, true);
-        prefix = "";
+        prefix.setLength(0);
         curDB = getFormattedDb(conf, ss);
         curPrompt = prompt + curDB;
         dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
       } else {
-        prefix = prefix + line;
+        prefix.append(line);
         curPrompt = prompt2 + dbSpaces;
         continue;
       }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 39dda92..29f7a7b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -3106,20 +3106,22 @@ private Index get_index_by_name_core(final RawStore ms, final String db_name,
   private String lowerCaseConvertPartName(String partName) throws MetaException {
     boolean isFirst = true;
     Map<String, String> partSpec = Warehouse.makeEscSpecFromName(partName);
-    String convertedPartName = new String();
+    StringBuilder convertedPartName = new StringBuilder();
     for (Map.Entry<String, String> entry : partSpec.entrySet()) {
       String partColName = entry.getKey();
       String partColVal = entry.getValue();
 
       if (!isFirst) {
-        convertedPartName += "/";
+        convertedPartName.append("/");
       } else {
         isFirst = false;
       }
-      convertedPartName += partColName.toLowerCase() + "=" + partColVal;
+      convertedPartName.append(partColName.toLowerCase());
+      convertedPartName.append("=");
+      convertedPartName.append(partColVal);
     }
-    return convertedPartName;
+    return convertedPartName.toString();
   }
 
   public ColumnStatistics get_table_column_statistics(String dbName, String tableName,
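Note on the CliDriver hunks: both loops reuse one builder across iterations, and `setLength(0)` clears the accumulated text without allocating a new `StringBuilder` per statement. Re-allocating with `new StringBuilder("")` also works, but gives up the buffer reuse that motivates the conversion. Illustrative sketch:

    public class ReuseDemo {
      public static void main(String[] args) {
        StringBuilder command = new StringBuilder();
        for (String oneCmd : "show tables;select 1".split(";")) {
          command.append(oneCmd);
          System.out.println("processing: " + command);
          command.setLength(0); // clear contents, keep the backing array
        }
      }
    }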
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 6ca3f22..4f9eede 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -515,19 +515,21 @@ public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaExc
       // Take the pattern and split it on the | to get all the composing
       // patterns
       String[] subpatterns = pattern.trim().split("\\|");
-      String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (";
+      StringBuilder query = new StringBuilder(
+          "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (");
       boolean first = true;
       for (String subpattern : subpatterns) {
         subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
         if (!first) {
-          query = query + " || ";
+          query.append(" || ");
         }
-        query = query + " name.matches(\"" + subpattern + "\")";
+        query.append(" name.matches(\"");
+        query.append(subpattern);
+        query.append("\")");
         first = false;
       }
-      query = query + ")";
+      query.append(")");
 
-      Query q = pm.newQuery(query);
+      Query q = pm.newQuery(query.toString());
       q.setResult("name");
       q.setOrdering("name ascending");
       Collection names = (Collection) q.execute();
@@ -775,21 +777,23 @@ public Table getTable(String dbName, String tableName) throws MetaException {
       // Take the pattern and split it on the | to get all the composing
       // patterns
       String[] subpatterns = pattern.trim().split("\\|");
-      String query =
-        "select tableName from org.apache.hadoop.hive.metastore.model.MTable "
-        + "where database.name == dbName && (";
+      StringBuilder query =
+        new StringBuilder("select tableName from org.apache.hadoop.hive.metastore.model.MTable "
+            + "where database.name == dbName && (");
       boolean first = true;
       for (String subpattern : subpatterns) {
         subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
         if (!first) {
-          query = query + " || ";
+          query.append(" || ");
         }
-        query = query + " tableName.matches(\"" + subpattern + "\")";
+        query.append(" tableName.matches(\"");
+        query.append(subpattern);
+        query.append("\")");
         first = false;
       }
-      query = query + ")";
+      query.append(")");
 
-      Query q = pm.newQuery(query);
+      Query q = pm.newQuery(query.toString());
       q.declareParameters("java.lang.String dbName");
       q.setResult("tableName");
       q.setOrdering("tableName ascending");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 3312b3b..2f0a2ed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -739,23 +739,25 @@ public QueryPlan getPlan() {
         name = p.getName().split("@")[2];
       }
 
-      String partialName = "";
+      StringBuilder partialName = new StringBuilder();
       String[] partns = name.split("/");
       int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
       Map<String, String> partialSpec = new LinkedHashMap<String, String>();
+      String partn;
+      String[] nameValue;
       for (int idx = 0; idx < len; idx++) {
-        String partn = partns[idx];
-        partialName += partn;
-        String[] nameValue = partn.split("=");
+        partn = partns[idx];
+        partialName.append(partn);
+        nameValue = partn.split("=");
         assert(nameValue.length == 2);
         partialSpec.put(nameValue[0], nameValue[1]);
         try {
           locks.add(new HiveLockObj(
                       new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName()
                                                             + "/" + p.getTable().getTableName()
-                                                            + "/" + partialName,
+                                                            + "/" + partialName.toString(),
                                                               partialSpec), lockData), mode));
-          partialName += "/";
+          partialName.append("/");
         } catch (HiveException e) {
           throw new SemanticException(e.getMessage());
         }
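Note on the two ObjectStore hunks: both interpose `" || "` with a first-iteration flag. An equivalent shape, often easier to verify, collects the clauses and joins them in one pass at the end. Self-contained sketch (the JDOQL string is abbreviated):

    import java.util.ArrayList;
    import java.util.List;

    public class DisjunctionDemo {
      public static void main(String[] args) {
        String[] subpatterns = "db1*|temp*".trim().split("\\|");
        List<String> clauses = new ArrayList<String>();
        for (String subpattern : subpatterns) {
          subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
          clauses.add(" name.matches(\"" + subpattern + "\")");
        }
        StringBuilder query = new StringBuilder("select name from MDatabase where (");
        for (int i = 0; i < clauses.size(); i++) {
          if (i > 0) {
            query.append(" || ");
          }
          query.append(clauses.get(i));
        }
        query.append(")");
        System.out.println(query);
      }
    }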
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 4dcb260..037ef32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1946,12 +1946,18 @@ private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws H
     String tbl_columns = "";
     List<FieldSchema> cols = tbl.getCols();
     List<String> columns = new ArrayList<String>();
     for (FieldSchema col : cols) {
-      String columnDesc = " " + col.getName() + " " + col.getType();
+      // one builder per column: each iteration must start from scratch
+      StringBuilder columnDesc = new StringBuilder(" ");
+      columnDesc.append(col.getName());
+      columnDesc.append(" ");
+      columnDesc.append(col.getType());
       if (col.getComment() != null) {
-        columnDesc = columnDesc + " COMMENT '" + escapeHiveCommand(col.getComment()) + "'";
+        columnDesc.append(" COMMENT '");
+        columnDesc.append(escapeHiveCommand(col.getComment()));
+        columnDesc.append("'");
       }
-      columns.add(columnDesc);
+      columns.add(columnDesc.toString());
     }
     tbl_columns = StringUtils.join(columns, ", \n");
@@ -1969,13 +1975,18 @@ private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws H
     if (partKeys.size() > 0) {
       tbl_partitions += "PARTITIONED BY ( \n";
       List<String> partCols = new ArrayList<String>();
       for (FieldSchema partKey : partKeys) {
-        String partColDesc = " " + partKey.getName() + " " + partKey.getType();
+        StringBuilder partColDesc = new StringBuilder(" ");
+        partColDesc.append(partKey.getName());
+        partColDesc.append(" ");
+        partColDesc.append(partKey.getType());
         if (partKey.getComment() != null) {
-          partColDesc = partColDesc + " COMMENT '" +
-            escapeHiveCommand(partKey.getComment()) + "'";
+          partColDesc.append(" COMMENT '");
+          partColDesc.append(escapeHiveCommand(partKey.getComment()));
+          partColDesc.append("'");
         }
-        partCols.add(partColDesc);
+        partCols.add(partColDesc.toString());
       }
       tbl_partitions += StringUtils.join(partCols, ", \n");
       tbl_partitions += ")";
@@ -1994,15 +2005,18 @@ private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws H
       tbl_sort_bucket += "SORTED BY ( \n";
       // Order
       List<String> sortKeys = new ArrayList<String>();
       for (Order sortCol : sortCols) {
-        String sortKeyDesc = " " + sortCol.getCol() + " ";
+        StringBuilder sortKeyDesc = new StringBuilder(" ");
+        sortKeyDesc.append(sortCol.getCol());
+        sortKeyDesc.append(" ");
         if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
-          sortKeyDesc = sortKeyDesc + "ASC";
+          sortKeyDesc.append("ASC");
         }
         else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
-          sortKeyDesc = sortKeyDesc + "DESC";
+          sortKeyDesc.append("DESC");
         }
-        sortKeys.add(sortKeyDesc);
+        sortKeys.add(sortKeyDesc.toString());
       }
       tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
       tbl_sort_bucket += ") \n";
@@ -2066,9 +2080,14 @@ else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
       if (serdeInfo.getParametersSize() > 0) {
         tbl_row_format += "WITH SERDEPROPERTIES ( \n";
         List<String> serdeCols = new ArrayList<String>();
         for (Map.Entry<String, String> entry : serdeInfo.getParameters().entrySet()) {
-          serdeCols.add(" '" + entry.getKey() + "'='"
-              + escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())) + "'");
+          StringBuilder tmp = new StringBuilder(" '");
+          tmp.append(entry.getKey());
+          tmp.append("'='");
+          tmp.append(escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())));
+          tmp.append("'");
+          serdeCols.add(tmp.toString());
         }
         tbl_row_format += StringUtils.join(serdeCols, ", \n");
         tbl_row_format += ")";
@@ -2081,10 +2100,15 @@ else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
     Map<String, String> properties = tbl.getParameters();
     if (properties.size() > 0) {
       List<String> realProps = new ArrayList<String>();
       for (String key : properties.keySet()) {
         if (properties.get(key) != null && !duplicateProps.contains(key)) {
-          realProps.add(" '" + key + "'='" +
-            escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))) + "'");
+          StringBuilder tmp = new StringBuilder(" '");
+          tmp.append(key);
+          tmp.append("'='");
+          tmp.append(escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))));
+          tmp.append("'");
+          realProps.add(tmp.toString());
         }
       }
       tbl_properties += StringUtils.join(realProps, ", \n");
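Note on the five DDLTask hunks: each loop iteration builds an independent string that is added to a list, so the builder must be freshly created (or cleared with `setLength(0)`) inside the loop. Hoisting a single builder above such a loop without a reset makes every list element carry all of its predecessors. A minimal demonstration of that failure mode (values illustrative):

    import java.util.ArrayList;
    import java.util.List;

    public class BuilderScopeDemo {
      public static void main(String[] args) {
        // Broken: one hoisted builder, never reset.
        List<String> rows = new ArrayList<String>();
        StringBuilder hoisted = new StringBuilder();
        for (String col : new String[] {"id", "name"}) {
          hoisted.append(" ").append(col);
          rows.add(hoisted.toString());
        }
        System.out.println(rows); // [ id,  id name] - second entry is wrong

        // Correct: a fresh builder per iteration.
        rows.clear();
        for (String col : new String[] {"id", "name"}) {
          StringBuilder scoped = new StringBuilder(" ");
          scoped.append(col);
          rows.add(scoped.toString());
        }
        System.out.println(rows); // [ id,  name]
      }
    }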
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
index 76cd848..d561bf0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java
@@ -183,17 +183,20 @@ else if (pathNames.length == 2) {
       return pathNames[0] + "@" + pathNames[1];
     }
 
-    String ret = pathNames[0] + "@" + pathNames[1] + "@";
+    StringBuilder ret = new StringBuilder(pathNames[0]);
+    ret.append("@");
+    ret.append(pathNames[1]);
+    ret.append("@");
     boolean first = true;
     for (int i = 2; i < pathNames.length; i++) {
       if (!first) {
-        ret = ret + "/";
+        ret.append("/");
       } else {
         first = false;
       }
-      ret = ret + pathNames[i];
+      ret.append(pathNames[i]);
     }
-    return ret;
+    return ret.toString();
   }
 
   public HiveLockObjectData getData() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
index b7b48d4..c76c40c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
@@ -166,13 +166,13 @@ private static String getLastObjectName(String parent, HiveLockObject key) {
    **/
   private List<String> getObjectNames(HiveLockObject key) {
     List<String> parents = new ArrayList<String>();
-    String curParent = "/" + parent + "/";
+    StringBuilder curParent = new StringBuilder("/" + parent + "/");
     String[] names = key.getName().split("/");
 
     for (String name : names) {
-      curParent = curParent + name;
-      parents.add(curParent);
-      curParent = curParent + "/";
+      curParent.append(name);
+      parents.add(curParent.toString());
+      curParent.append("/");
     }
     return parents;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index f1d29f8..f708f0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -940,12 +940,13 @@ public String getCompleteName() {
       // different from the filesystem of the partition.
       FileSystem fs = FileSystem.get(getPath().toUri(), Hive.get()
           .getConf());
-      String pathPattern = getPath().toString();
+      StringBuilder pathPattern = new StringBuilder(getPath().toString());
       if (getNumBuckets() > 0) {
-        pathPattern = pathPattern + "/*";
+        pathPattern.append("/*");
       }
-      LOG.info("Path pattern = " + pathPattern);
-      FileStatus srcs[] = fs.globStatus(new Path(pathPattern));
+      LOG.info("Path pattern = " + pathPattern.toString());
+      FileStatus srcs[] = fs.globStatus(new Path(pathPattern.toString()));
       Arrays.sort(srcs);
       for (FileStatus src : srcs) {
         LOG.info("Got file: " + src.getPath());
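One slip to guard against in these mechanical conversions: appending a builder to itself, as in `pathPattern.append(pathPattern)`, duplicates its current contents in place, which is never the translation of `s = s + "suffix"`. Self-contained demo:

    public class SelfAppendDemo {
      public static void main(String[] args) {
        StringBuilder pattern = new StringBuilder("/warehouse/t1");
        pattern.append(pattern);       // doubles the content
        System.out.println(pattern);   // /warehouse/t1/warehouse/t1

        StringBuilder fixed = new StringBuilder("/warehouse/t1");
        fixed.append("/*");
        System.out.println(fixed);     // /warehouse/t1/*
      }
    }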
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index 4bbaf2b..c07f1a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -707,14 +707,17 @@ private static void pruneReduceSinkOperator(boolean[] retainFlags,
     List<ExprNodeDesc> valueExprs = reduceConf.getValueCols();
     ArrayList<ExprNodeDesc> newValueExprs = new ArrayList<ExprNodeDesc>();
     for (int i = 0; i < retainFlags.length; i++) {
-      String outputCol = valueColNames.get(i);
+      StringBuilder outputCol = new StringBuilder(valueColNames.get(i));
       ExprNodeDesc outputColExpr = valueExprs.get(i);
       if (!retainFlags[i]) {
-        String[] nm = oldRR.reverseLookup(outputCol);
+        String[] nm = oldRR.reverseLookup(outputCol.toString());
         if (nm == null) {
-          outputCol = Utilities.ReduceField.VALUE.toString() + "." + outputCol;
-          nm = oldRR.reverseLookup(outputCol);
+          // prepend the VALUE prefix; StringBuilder has no prepend, so insert at 0
+          outputCol.insert(0, Utilities.ReduceField.VALUE.toString() + ".");
+          nm = oldRR.reverseLookup(outputCol.toString());
         }
 
         // Only remove information of a column if it is not a key,
@@ -727,7 +730,7 @@ private static void pruneReduceSinkOperator(boolean[] retainFlags,
         }
       } else {
-        newValueColNames.add(outputCol);
+        newValueColNames.add(outputCol.toString());
         newValueExprs.add(outputColExpr);
       }
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
index cd1b4ad..e691475 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
@@ -481,19 +481,23 @@ public static MapJoinOperator convertMapJoin(
       valueTableDescs.add(valueTableDesc);
       valueFiltedTableDescs.add(valueFilteredTableDesc);
     }
-    String dumpFilePrefix = "";
+    StringBuilder dumpFilePrefix = new StringBuilder();
     if( joinTree.getMapAliases() != null ) {
       for(String mapAlias : joinTree.getMapAliases()) {
-        dumpFilePrefix = dumpFilePrefix + mapAlias;
+        dumpFilePrefix.append(mapAlias);
       }
-      dumpFilePrefix = dumpFilePrefix+"-"+PlanUtils.getCountForMapJoinDumpFilePrefix();
+      dumpFilePrefix.append("-");
+      dumpFilePrefix.append(PlanUtils.getCountForMapJoinDumpFilePrefix());
     } else {
-      dumpFilePrefix = "mapfile"+PlanUtils.getCountForMapJoinDumpFilePrefix();
+      dumpFilePrefix.append("mapfile");
+      dumpFilePrefix.append(PlanUtils.getCountForMapJoinDumpFilePrefix());
     }
     MapJoinDesc mapJoinDescriptor = new MapJoinDesc(keyExprMap, keyTableDesc, valueExprMap,
         valueTableDescs, valueFiltedTableDescs, outputColumnNames, mapJoinPos, joinCondns,
-        filters, op.getConf().getNoOuterJoin(), dumpFilePrefix);
+        filters, op.getConf().getNoOuterJoin(), dumpFilePrefix.toString());
     mapJoinDescriptor.setTagOrder(tagOrder);
+    mapJoinDescriptor.setNullSafes(desc.getNullSafes());
     mapJoinDescriptor.setFilterMap(desc.getFilterMap());
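Note on the ColumnPrunerProcFactory hunk: code of the shape `outputCol = PREFIX + "." + outputCol` prepends, and `StringBuilder` has no prepend method; `insert(0, ...)` is the direct equivalent. Sketch (here `"VALUE"` stands in for `Utilities.ReduceField.VALUE.toString()`):

    public class PrependDemo {
      public static void main(String[] args) {
        StringBuilder outputCol = new StringBuilder("_col7");
        outputCol.insert(0, "VALUE" + ".");
        System.out.println(outputCol); // VALUE._col7
      }
    }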
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
index 1d8336f..943eb1f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
@@ -234,15 +234,15 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
       //groupOpToInputTables map with the new GroupByOperator
       if(rewriteQueryCtx.getParseContext().getGroupOpToInputTables().containsKey(operator)){
         List<ExprNodeDesc> gbyKeyList = operator.getConf().getKeys();
-        String gbyKeys = null;
+        StringBuilder gbyKeys = new StringBuilder();
         Iterator<ExprNodeDesc> gbyKeyListItr = gbyKeyList.iterator();
         while(gbyKeyListItr.hasNext()){
           ExprNodeDesc expr = gbyKeyListItr.next().clone();
           if(expr instanceof ExprNodeColumnDesc){
             ExprNodeColumnDesc colExpr = (ExprNodeColumnDesc)expr;
-            gbyKeys = colExpr.getColumn();
+            gbyKeys.append(colExpr.getColumn());
             if(gbyKeyListItr.hasNext()){
-              gbyKeys = gbyKeys + ",";
+              gbyKeys.append(",");
             }
           }
         }
@@ -252,7 +252,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
         String selReplacementCommand = "select sum(`" + rewriteQueryCtx.getAggregateFunction() + "`)"
           + " from " + rewriteQueryCtx.getIndexName()
-          + " group by " + gbyKeys + " ";
+          + " group by " + gbyKeys.toString() + " ";
 
         //create a new ParseContext for the query to retrieve its operator tree,
         //and the required GroupByOperator from it
         ParseContext newDAGContext = RewriteParseContextGenerator.generateOperatorTree(
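Note on the RewriteQueryUsingAggregateIndex hunk: the comma is emitted only when `hasNext()` reports another element, so no trailing separator is produced for the common case. (Because the append sits inside the `instanceof` guard, a non-column key as the last element could still leave a dangling comma; that behavior is unchanged from the surrounding code.) The idiom in isolation:

    import java.util.Arrays;
    import java.util.Iterator;

    public class SeparatorDemo {
      public static void main(String[] args) {
        Iterator<String> it = Arrays.asList("k1", "k2", "k3").iterator();
        StringBuilder gbyKeys = new StringBuilder();
        while (it.hasNext()) {
          gbyKeys.append(it.next());
          if (it.hasNext()) {
            gbyKeys.append(",");
          }
        }
        System.out.println(gbyKeys); // k1,k2,k3
      }
    }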
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
index ee4d4d1..09266c4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
@@ -168,39 +168,41 @@ public static void processSkewJoin(JoinOperator joinOp,
       newJoinValueTblDesc.add(null);
     }
 
     for (int i = 0; i < numAliases; i++) {
       Byte alias = tags[i];
       List<ExprNodeDesc> valueCols = joinValues.get(alias);
-      String colNames = "";
-      String colTypes = "";
+      StringBuilder colNames = new StringBuilder();
+      StringBuilder colTypes = new StringBuilder();
       int columnSize = valueCols.size();
       List<ExprNodeDesc> newValueExpr = new ArrayList<ExprNodeDesc>();
       List<ExprNodeDesc> newKeyExpr = new ArrayList<ExprNodeDesc>();
 
       boolean first = true;
       for (int k = 0; k < columnSize; k++) {
         TypeInfo type = valueCols.get(k).getTypeInfo();
         String newColName = i + "_VALUE_" + k; // any name, it does not matter.
         newValueExpr
             .add(new ExprNodeColumnDesc(type, newColName, "" + i, false));
         if (!first) {
-          colNames = colNames + ",";
-          colTypes = colTypes + ",";
+          colNames.append(",");
+          colTypes.append(",");
         }
         first = false;
-        colNames = colNames + newColName;
-        colTypes = colTypes + valueCols.get(k).getTypeString();
+        colNames.append(newColName);
+        colTypes.append(valueCols.get(k).getTypeString());
       }
 
       // we are putting join keys at last part of the spilled table
       for (int k = 0; k < joinKeys.size(); k++) {
         if (!first) {
-          colNames = colNames + ",";
-          colTypes = colTypes + ",";
+          colNames.append(",");
+          colTypes.append(",");
         }
         first = false;
-        colNames = colNames + joinKeys.get(k);
-        colTypes = colTypes + joinKeyTypes.get(k);
+        colNames.append(joinKeys.get(k));
+        colTypes.append(joinKeyTypes.get(k));
         newKeyExpr.add(new ExprNodeColumnDesc(TypeInfoFactory
             .getPrimitiveTypeInfo(joinKeyTypes.get(k)), joinKeys.get(k),
             "" + i, false));
@@ -208,24 +210,28 @@ public static void processSkewJoin(JoinOperator joinOp,
 
       newJoinValues.put(alias, newValueExpr);
       newJoinKeys.put(alias, newKeyExpr);
-      tableDescList.put(alias, Utilities.getTableDesc(colNames, colTypes));
+      tableDescList.put(alias, Utilities.getTableDesc(colNames.toString(), colTypes.toString()));
 
       // construct value table Desc
-      String valueColNames = "";
-      String valueColTypes = "";
+      StringBuilder valueColNames = new StringBuilder();
+      StringBuilder valueColTypes = new StringBuilder();
       first = true;
       for (int k = 0; k < columnSize; k++) {
         String newColName = i + "_VALUE_" + k; // any name, it does not matter.
         if (!first) {
-          valueColNames = valueColNames + ",";
-          valueColTypes = valueColTypes + ",";
+          valueColNames.append(",");
+          valueColTypes.append(",");
         }
-        valueColNames = valueColNames + newColName;
-        valueColTypes = valueColTypes + valueCols.get(k).getTypeString();
+        valueColNames.append(newColName);
+        valueColTypes.append(valueCols.get(k).getTypeString());
         first = false;
       }
       newJoinValueTblDesc.set(Byte.valueOf((byte) i), Utilities.getTableDesc(
-          valueColNames, valueColTypes));
+          valueColNames.toString(), valueColTypes.toString()));
     }
 
     joinDescriptor.setSkewKeysValuesTables(tableDescList);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
index dfb0f48..b94e8c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
@@ -293,13 +293,14 @@ private void generateActualTasks(HiveConf conf, List ( SELECT TEMP."
-        + getStatColumnName(getBasicStat()) + " FROM ( " +
-        " SELECT " + getStatColumnName(getBasicStat()) + " FROM " + getStatTableName() + " WHERE "
-        + getIdColumnName() + " = ? ) TEMP )";
-    return update;
+    update.append(getTimestampColumnName());
+    update.append(" = CURRENT_TIMESTAMP");
+    update.append(" WHERE ");
+    update.append(JDBCStatsUtils.getIdColumnName());
+    update.append(" = ? AND ? > ( SELECT TEMP.");
+    update.append(getStatColumnName(getBasicStat()));
+    update.append(" FROM ( ");
+    update.append(" SELECT ");
+    update.append(getStatColumnName(getBasicStat()));
+    update.append(" FROM ");
+    update.append(getStatTableName());
+    update.append(" WHERE ");
+    update.append(getIdColumnName());
+    update.append(" = ? ) TEMP )");
+    return update.toString();
   }
 
   /**
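Note on the skew-join hunks: `colNames` and `colTypes` advance in lockstep, so one `first` flag serves both builders, and generated column names follow the `i + "_VALUE_" + k` scheme, where dropping the trailing underscore would silently change every generated schema string. Compact illustration:

    public class PairedCsvDemo {
      public static void main(String[] args) {
        String[] names = {"0_VALUE_0", "0_VALUE_1"};
        String[] types = {"int", "string"};
        StringBuilder colNames = new StringBuilder();
        StringBuilder colTypes = new StringBuilder();
        boolean first = true;
        for (int k = 0; k < names.length; k++) {
          if (!first) {
            colNames.append(",");
            colTypes.append(",");
          }
          first = false;
          colNames.append(names[k]);
          colTypes.append(types[k]);
        }
        System.out.println(colNames + " / " + colTypes);
        // 0_VALUE_0,0_VALUE_1 / int,string
      }
    }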
) TEMP )"); + return update.toString(); } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java index c0a8116..3639456 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java @@ -106,7 +106,7 @@ private void parseSimplePattern(String likePattern) { int beginIndex = 0; int endIndex = length; char lastChar = 'a'; - String strPattern = new String(); + StringBuilder strPattern = new StringBuilder(); type = PatternType.NONE; for (int i = 0; i < length; i++) { @@ -116,7 +116,7 @@ private void parseSimplePattern(String likePattern) { type = PatternType.COMPLEX; return; } else { // such as "abc\%de%" - strPattern += likePattern.substring(beginIndex, i - 1); + strPattern.append(likePattern.substring(beginIndex, i - 1)); beginIndex = i; } } else if (n == '%') { @@ -128,7 +128,7 @@ private void parseSimplePattern(String likePattern) { type = PatternType.COMPLEX; return; } else { // such as "abc\%de%" - strPattern += likePattern.substring(beginIndex, i - 1); + strPattern.append(likePattern.substring(beginIndex, i - 1)); beginIndex = i; } } else { @@ -140,7 +140,7 @@ private void parseSimplePattern(String likePattern) { type = PatternType.BEGIN; // such as "abc%" } } else { // such as "abc\%" - strPattern += likePattern.substring(beginIndex, i - 1); + strPattern.append(likePattern.substring(beginIndex, i - 1)); beginIndex = i; endIndex = length; } @@ -149,8 +149,8 @@ private void parseSimplePattern(String likePattern) { lastChar = n; } - strPattern += likePattern.substring(beginIndex, endIndex); - simplePattern.set(strPattern); + strPattern.append(likePattern.substring(beginIndex, endIndex)); + simplePattern.set(strPattern.toString()); } private static boolean find(Text s, Text sub, int startS, int endS) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java index 24159b8..8fd6ac7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java @@ -158,11 +158,11 @@ public void printNumDistinctValueEstimator() { * */ public Text serialize() { - String s = new String(); + StringBuffer s = new StringBuffer(); for(int i=0; i < numBitVectors; i++) { - s = s + (bitVector[i].toString()); + s.append(bitVector[i].toString()); } - return new Text(s); + return new Text(s.toString()); } /* Deserializes from string to FastBitSet; Creates a NumDistinctValueEstimator object and @@ -183,6 +183,7 @@ public Text serialize() { * adds { and } to the beginning and end of the return String. * Skip "{", "}", ",", " " in the input string. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
index 24159b8..8fd6ac7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
@@ -158,11 +158,11 @@ public void printNumDistinctValueEstimator() {
    *
    */
   public Text serialize() {
-    String s = new String();
+    StringBuffer s = new StringBuffer();
     for(int i=0; i < numBitVectors; i++) {
-      s = s + (bitVector[i].toString());
+      s.append(bitVector[i].toString());
     }
-    return new Text(s);
+    return new Text(s.toString());
   }
 
   /* Deserializes from string to FastBitSet; Creates a NumDistinctValueEstimator object and
@@ -183,6 +183,7 @@ public Text serialize() {
      * adds { and } to the beginning and end of the return String.
      * Skip "{", "}", ",", " " in the input string.
      */
+    StringBuffer t = new StringBuffer();
     for(int i=1; i < s.length()-1;) {
       char c = s.charAt(i);
       i = i + 1;
@@ -194,18 +195,17 @@ public Text serialize() {
 
       // Encountered a numeric value; Extract out the entire number
       if (c >= '0' && c <= '9') {
-        String t = new String();
-        t = t + c;
+        t.setLength(0); // reset the shared buffer, or successive numbers run together
+        t.append(c);
         c = s.charAt(i);
         i = i + 1;
 
         while (c != ',' && c!= '}') {
-          t = t + c;
+          t.append(c);
           c = s.charAt(i);
           i = i + 1;
         }
-        int bitIndex = Integer.parseInt(t);
+        int bitIndex = Integer.parseInt(t.toString());
         assert(bitIndex >= 0);
         assert(vectorIndex < numBitVectors);
         b[vectorIndex].set(bitIndex);
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
index 23180cf..e40dc85 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
@@ -70,17 +70,18 @@ private static void setReadColumnIDConf(Configuration conf, String id) {
   }
 
   private static String toReadColumnIDString(List<Integer> ids) {
-    String id = null;
+    StringBuilder id = new StringBuilder();
     if (ids != null) {
       for (int i = 0; i < ids.size(); i++) {
         if (i == 0) {
-          id = "" + ids.get(i);
+          id.append(ids.get(i));
         } else {
-          id = id + StringUtils.COMMA_STR + ids.get(i);
+          id.append(StringUtils.COMMA_STR);
+          id.append(ids.get(i));
         }
       }
     }
-    return id;
+    // preserve the original contract: null (not "") when no ids were given
+    return id.length() > 0 ? id.toString() : null;
   }
 
   /**
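Note on the deserialization hunk: hoisting the number buffer out of the scan loop only works together with `setLength(0)` before each number; without the reset, every parsed value would be concatenated onto the previous ones. (For method-local buffers like these, `StringBuilder` would also suffice; `StringBuffer`'s synchronization buys nothing for a variable that never escapes the method.) The same scan in isolation, with an illustrative input:

    public class DigitRunDemo {
      public static void main(String[] args) {
        String s = "{12, 5, 139}";
        StringBuilder t = new StringBuilder(); // hoisted once, cleared per number
        for (int i = 1; i < s.length() - 1; i++) {
          char c = s.charAt(i);
          if (c >= '0' && c <= '9') {
            t.setLength(0);
            while (c >= '0' && c <= '9') {
              t.append(c);
              c = s.charAt(++i);
            }
            System.out.println(Integer.parseInt(t.toString())); // 12, 5, 139
          }
        }
      }
    }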
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
index d7fde21..dc94ed5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
@@ -104,29 +104,34 @@ public String getMessage() {
       }
       expected.append(eol).append(" ");
     }
-    String retval = "Encountered \"";
+    StringBuffer retval = new StringBuffer("Encountered \"");
     Token tok = currentToken.next;
     for (int i = 0; i < maxSize; i++) {
       if (i != 0) {
-        retval += " ";
+        retval.append(" ");
       }
       if (tok.kind == 0) {
-        retval += tokenImage[0];
+        retval.append(tokenImage[0]);
         break;
       }
-      retval += add_escapes(tok.image);
+      retval.append(add_escapes(tok.image));
       tok = tok.next;
     }
-    retval += "\" at line " + currentToken.next.beginLine + ", column "
-      + currentToken.next.beginColumn;
-    retval += "." + eol;
+    retval.append("\" at line " + currentToken.next.beginLine + ", column ");
+    retval.append(currentToken.next.beginColumn);
+    retval.append(".");
+    retval.append(eol);
     if (expectedTokenSequences.length == 1) {
-      retval += "Was expecting:" + eol + " ";
+      retval.append("Was expecting:");
+      retval.append(eol);
+      retval.append(" ");
     } else {
-      retval += "Was expecting one of:" + eol + " ";
+      retval.append("Was expecting one of:");
+      retval.append(eol);
+      retval.append(" ");
    }
-    retval += expected.toString();
-    return retval;
+    retval.append(expected.toString());
+    return retval.toString();
   }
 
   /**
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
index 8683496..b96b596 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
@@ -160,17 +160,18 @@ public void process(WatchedEvent event) {
   public static String ensurePath(ZooKeeper zk, String path, List<ACL> acl)
       throws KeeperException, InterruptedException {
     String[] pathComps = StringUtils.splitByWholeSeparator(path, "/");
-    String currentPath = "";
+    StringBuilder currentPath = new StringBuilder();
     for (String pathComp : pathComps) {
-      currentPath += "/" + pathComp;
+      currentPath.append("/");
+      currentPath.append(pathComp);
       try {
-        String node = zk.create(currentPath, new byte[0], acl,
+        String node = zk.create(currentPath.toString(), new byte[0], acl,
             CreateMode.PERSISTENT);
         LOGGER.info("Created path: " + node);
       } catch (KeeperException.NodeExistsException e) {
       }
     }
-    return currentPath;
+    return currentPath.toString();
   }
 
   /**
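Closing note on the motivation for this patch as a whole: `s += x` inside a loop re-copies the accumulated prefix on every iteration, so total work grows quadratically with output length, while a builder grows its buffer geometrically and appends in amortized constant time. A rough, self-contained comparison (timings are machine-dependent; this is a sketch, not a rigorous benchmark):

    public class ConcatCost {
      public static void main(String[] args) {
        int n = 50000;
        long t0 = System.nanoTime();
        String s = "";
        for (int i = 0; i < n; i++) {
          s += "x"; // copies the whole prefix each iteration
        }
        long t1 = System.nanoTime();
        StringBuilder b = new StringBuilder();
        for (int i = 0; i < n; i++) {
          b.append("x"); // amortized O(1) per append
        }
        String s2 = b.toString();
        long t2 = System.nanoTime();
        System.out.println(s.length() == s2.length());
        System.out.println("concat: " + (t1 - t0) / 1000000 + " ms, builder: "
            + (t2 - t1) / 1000000 + " ms");
      }
    }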