diff --git common/src/java/org/apache/hive/common/util/HiveStringUtils.java common/src/java/org/apache/hive/common/util/HiveStringUtils.java index 72c3fa9..507e369 100644 --- common/src/java/org/apache/hive/common/util/HiveStringUtils.java +++ common/src/java/org/apache/hive/common/util/HiveStringUtils.java @@ -77,6 +77,15 @@ }).with( new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE())); + private static final CharSequenceTranslator ESCAPE_HIVE_COMMAND = + new LookupTranslator( + new String[][] { + {"'", "\\'"}, + {";", "\\;"}, + {"\\", "\\\\"}, + }).with( + new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE())); + /** * Maintain a String pool to reduce memory. */ @@ -622,7 +631,19 @@ public static String escapeString(String str, char escapeChar, */ public static String escapeJava(String str) { return ESCAPE_JAVA.translate(str); -} + } + + /** + * Escape non-unicode characters, plus the characters ', ; and \. + * StringEscapeUtils.escapeJava() would escape unicode characters + * as well, but in some cases that is not desired. + * + * @param str Original string + * @return Escaped string + */ + public static String escapeHiveCommand(String str) { + return ESCAPE_HIVE_COMMAND.translate(str); + } /** * Unescape commas in the string using the default escape char diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index a59b781..57cd727 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -50,7 +50,6 @@ import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -228,6 +227,7 @@ import org.apache.hadoop.tools.HadoopArchives; import org.apache.hadoop.util.ToolRunner; import org.apache.hive.common.util.AnnotationUtils; +import org.apache.hive.common.util.HiveStringUtils; import org.apache.hive.common.util.ReflectionUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -2046,7 +2046,8 @@ private int showCreateDatabase(Hive db, DataOutputStream outStream, String datab createDb_str.append("CREATE DATABASE `").append(database.getName()).append("`\n"); if (database.getDescription() != null) { createDb_str.append("COMMENT\n '"); - createDb_str.append(escapeHiveCommand(database.getDescription())).append("'\n"); + createDb_str.append( + HiveStringUtils.escapeHiveCommand(database.getDescription())).append("'\n"); } createDb_str.append("LOCATION\n '"); createDb_str.append(database.getLocationUri()).append("'\n"); @@ -2144,7 +2145,8 @@ private int showCreateTable(Hive db, DataOutputStream outStream, String tableNam for (FieldSchema col : cols) { String columnDesc = " `" + col.getName() + "` " + col.getType(); if (col.getComment() != null) { - columnDesc = columnDesc + " COMMENT '" + escapeHiveCommand(col.getComment()) + "'"; + columnDesc = columnDesc + " COMMENT '" + + HiveStringUtils.escapeHiveCommand(col.getComment()) + "'"; } columns.add(columnDesc); } @@ -2155,7 +2157,8 @@ private int showCreateTable(Hive db, DataOutputStream outStream, String tableNam String tabComment = tbl.getProperty("comment"); if (tabComment != null) { duplicateProps.add("comment"); - tbl_comment = "COMMENT '" + escapeHiveCommand(tabComment) + "'"; + tbl_comment = "COMMENT '" + + HiveStringUtils.escapeHiveCommand(tabComment) + "'"; } // Partitions @@ -2167,8 +2170,8 @@ private int showCreateTable(Hive db,
DataOutputStream outStream, String tableNam for (FieldSchema partKey : partKeys) { String partColDesc = " `" + partKey.getName() + "` " + partKey.getType(); if (partKey.getComment() != null) { - partColDesc = partColDesc + " COMMENT '" + - escapeHiveCommand(partKey.getComment()) + "'"; + partColDesc = partColDesc + " COMMENT '" + + HiveStringUtils.escapeHiveCommand(partKey.getComment()) + "'"; } partCols.add(partColDesc); } @@ -2211,7 +2214,8 @@ else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) { SerDeInfo serdeInfo = sd.getSerdeInfo(); Map serdeParams = serdeInfo.getParameters(); tbl_row_format.append("ROW FORMAT SERDE \n"); - tbl_row_format.append(" '" + escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n"); + tbl_row_format.append(" '" + + HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n"); if (tbl.getStorageHandler() == null) { // If serialization.format property has the default value, it will not to be included in // SERDE properties @@ -2222,20 +2226,21 @@ else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) { if (!serdeParams.isEmpty()) { appendSerdeParams(tbl_row_format, serdeParams).append(" \n"); } - tbl_row_format.append("STORED AS INPUTFORMAT \n '" + - escapeHiveCommand(sd.getInputFormat()) + "' \n"); - tbl_row_format.append("OUTPUTFORMAT \n '" + - escapeHiveCommand(sd.getOutputFormat()) + "'"); + tbl_row_format.append("STORED AS INPUTFORMAT \n '" + + HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n"); + tbl_row_format.append("OUTPUTFORMAT \n '" + + HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'"); } else { duplicateProps.add(META_TABLE_STORAGE); - tbl_row_format.append("STORED BY \n '" + escapeHiveCommand(tbl.getParameters().get( + tbl_row_format.append("STORED BY \n '" + + HiveStringUtils.escapeHiveCommand(tbl.getParameters().get( META_TABLE_STORAGE)) + "' \n"); // SerDe Properties if (!serdeParams.isEmpty()) { appendSerdeParams(tbl_row_format, serdeInfo.getParameters()); } } - String tbl_location = " '" + escapeHiveCommand(sd.getLocation()) + "'"; + String tbl_location = " '" + HiveStringUtils.escapeHiveCommand(sd.getLocation()) + "'"; // Table properties duplicateProps.addAll(Arrays.asList(StatsSetupConst.TABLE_PARAMS_STATS_KEYS)); @@ -2271,7 +2276,7 @@ private String propertiesToString(Map props, List exclud for (String key : properties.keySet()) { if (properties.get(key) != null && (exclude == null || !exclude.contains(key))) { realProps.add(" '" + key + "'='" + - escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))) + "'"); + HiveStringUtils.escapeHiveCommand(properties.get(key)) + "'"); } } prop_string += StringUtils.join(realProps, ", \n"); @@ -2285,7 +2290,7 @@ private StringBuilder appendSerdeParams(StringBuilder builder, Map serdeCols = new ArrayList(); for (Entry entry : serdeParam.entrySet()) { serdeCols.add(" '" + entry.getKey() + "'='" - + escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())) + "'"); + + HiveStringUtils.escapeHiveCommand(entry.getValue()) + "'"); } builder.append(StringUtils.join(serdeCols, ", \n")).append(')'); return builder; @@ -2312,6 +2317,10 @@ private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveExcepti indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1); + // In case the query is served by HiveServer2, don't pad it with spaces, + // as HiveServer2 output is consumed by JDBC/ODBC clients.
+ boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); + // write the results in the file DataOutputStream outStream = getOutputStream(showIndexes.getResFile()); try { @@ -2324,7 +2333,7 @@ private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveExcepti for (Index index : indexes) { - outStream.write(MetaDataFormatUtils.getAllColumnsInformation(index).getBytes(StandardCharsets.UTF_8)); + outStream.write(MetaDataFormatUtils.getIndexInformation(index, isOutputPadded).getBytes(StandardCharsets.UTF_8)); } } catch (FileNotFoundException e) { LOG.info("show indexes: " + stringifyException(e)); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java index 03803bb..7c00afb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java @@ -233,11 +233,11 @@ private static void formatWithoutIndentation(String name, String type, String co appendColumnStatsNoFormatting(colBuffer, "", "", "", "", "", "", "", "");
} } - colBuffer.append(comment == null ? "" : comment); + colBuffer.append(comment == null ? "" : HiveStringUtils.escapeJava(comment)); colBuffer.append(LINE_DELIM); } - public static String getAllColumnsInformation(Index index) { + public static String getIndexInformation(Index index, boolean isOutputPadded) { StringBuilder indexInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); List indexColumns = new ArrayList(); @@ -268,9 +268,10 @@ public static String getAllColumnsInformation(Index index) { IndexType indexType = HiveIndex.getIndexTypeByClassName(indexHandlerClass); indexColumns.add(indexType.getName()); - indexColumns.add(index.getParameters().get("comment")); + String comment = index.getParameters().get("comment"); + indexColumns.add(comment == null ? null : HiveStringUtils.escapeJava(comment)); - formatOutput(indexColumns.toArray(new String[0]), indexInfo); + formatOutput(indexColumns.toArray(new String[0]), indexInfo, isOutputPadded); return indexInfo.toString(); } @@ -354,12 +355,12 @@ public static String getPartitionInformation(Partition part) { return tableInfo.toString(); } - public static String getTableInformation(Table table) { + public static String getTableInformation(Table table, boolean isOutputPadded) { StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); // Table Metadata tableInfo.append(LINE_DELIM).append("# Detailed Table Information").append(LINE_DELIM); - getTableMetaDataInformation(tableInfo, table); + getTableMetaDataInformation(tableInfo, table, isOutputPadded); // Storage information. tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM); @@ -427,7 +428,8 @@ private static void getStorageDescriptorInfo(StringBuilder tableInfo, } } - private static void getTableMetaDataInformation(StringBuilder tableInfo, Table tbl) { + private static void getTableMetaDataInformation(StringBuilder tableInfo, Table tbl, + boolean isOutputPadded) { formatOutput("Database:", tbl.getDbName(), tableInfo); formatOutput("Owner:", tbl.getOwner(), tableInfo); formatOutput("CreateTime:", formatDate(tbl.getTTable().getCreateTime()), tableInfo); @@ -440,7 +442,7 @@ private static void getTableMetaDataInformation(StringBuilder tableInfo, Table if (tbl.getParameters().size() > 0) { tableInfo.append("Table Parameters:").append(LINE_DELIM); - displayAllParameters(tbl.getParameters(), tableInfo, false); + displayAllParameters(tbl.getParameters(), tableInfo, false, isOutputPadded); } } @@ -464,7 +466,7 @@ private static void getPartitionMetaDataInformation(StringBuilder tableInfo, Par * including unicode. */ private static void displayAllParameters(Map params, StringBuilder tableInfo) { - displayAllParameters(params, tableInfo, true); + displayAllParameters(params, tableInfo, true, false); } /** @@ -472,15 +474,14 @@ private static void displayAllParameters(Map params, StringBuild * including unicode if escapeUnicode is true; otherwise the characters other * than unicode will be escaped. */ - - private static void displayAllParameters(Map params, StringBuilder tableInfo, boolean escapeUnicode) { + private static void displayAllParameters(Map params, StringBuilder tableInfo, boolean escapeUnicode, boolean isOutputPadded) { List keys = new ArrayList(params.keySet()); Collections.sort(keys); for (String key : keys) { tableInfo.append(FIELD_DELIM); // Ensures all params are indented. formatOutput(key, escapeUnicode ? 
StringEscapeUtils.escapeJava(params.get(key)) : HiveStringUtils.escapeJava(params.get(key)), - tableInfo); + tableInfo, isOutputPadded); } } @@ -496,21 +497,74 @@ private static String formatDate(long timeInSeconds) { return "UNKNOWN"; } - private static void formatOutput(String[] fields, StringBuilder tableInfo) { - for (String field : fields) { - if (field == null) { - tableInfo.append(FIELD_DELIM); - continue; + /** + * Prints a row with the given fields into the builder. + * The last field could be a multiline field, and the extra lines should be padded. + * @param fields The fields to print + * @param tableInfo The target builder + * @param isLastLinePadded Whether the last field may be printed across multiple lines, if it + * contains newlines + */ + private static void formatOutput(String[] fields, StringBuilder tableInfo, + boolean isLastLinePadded) { + int[] paddings = new int[Math.max(fields.length - 1, 0)]; + if (fields.length > 1) { + for (int i = 0; i < fields.length - 1; i++) { + if (fields[i] == null) { + tableInfo.append(FIELD_DELIM); + continue; + } + tableInfo.append(String.format("%-" + ALIGNMENT + "s", fields[i])).append(FIELD_DELIM); + paddings[i] = ALIGNMENT > fields[i].length() ? ALIGNMENT : fields[i].length(); } - tableInfo.append(String.format("%-" + ALIGNMENT + "s", field)).append(FIELD_DELIM); } - tableInfo.append(LINE_DELIM); + if (fields.length > 0) { + String value = fields[fields.length-1]; + String unescapedValue = + (isLastLinePadded && value != null) ? value.replaceAll("\\\\r\\\\n|\\\\r|\\\\n", "\n") + : value; + indentMultilineValue(unescapedValue, tableInfo, paddings); + } else { + tableInfo.append(LINE_DELIM); + } } - private static void formatOutput(String name, String value, - StringBuilder tableInfo) { + /** + * Prints a row of the given fields as a formatted line + * @param fields The fields to print + * @param tableInfo The target builder + */ + private static void formatOutput(String[] fields, StringBuilder tableInfo) { + formatOutput(fields, tableInfo, false); + } + + /** + * Prints the name value pair, and if the value contains newlines, it adds one more empty field + * before the two values (assumes the name value pair is already indented accordingly) + * @param name The field name to print + * @param value The value to print - might contain newlines + * @param tableInfo The target builder + */ + private static void formatOutput(String name, String value, StringBuilder tableInfo) { tableInfo.append(String.format("%-" + ALIGNMENT + "s", name)).append(FIELD_DELIM); - tableInfo.append(String.format("%-" + ALIGNMENT + "s", value)).append(LINE_DELIM); + int colNameLength = ALIGNMENT > name.length() ? ALIGNMENT : name.length(); + indentMultilineValue(value, tableInfo, new int[] {0, colNameLength}); + } + + /** + * Prints the name value pair. + * If the output is padded then unescape the value, so it can be printed in multiple lines. + * In this case it assumes the pair is already indented with a field delimiter + * @param name The field name to print + * @param value The value to print + * @param tableInfo The target builder + * @param isOutputPadded Should the value be printed as a padded string? + */ + private static void formatOutput(String name, String value, StringBuilder tableInfo, + boolean isOutputPadded) { + String unescapedValue = + (isOutputPadded && value != null) ?
value.replaceAll("\\\\r\\\\n|\\\\r|\\\\n", "\n") : value; + formatOutput(name, unescapedValue, tableInfo); } private static void formatWithIndentation(String colName, String colType, String colComment, @@ -559,17 +613,47 @@ private static void formatWithIndentation(String colName, String colType, String } } - // comment indent processing for multi-line comments - // comments should be indented the same amount on each line - // if the first line comment starts indented by k, - // the following line comments should also be indented by k - String[] commentSegments = colComment.split("\n|\r|\r\n"); - tableInfo.append(String.format("%-" + ALIGNMENT + "s", commentSegments[0])).append(LINE_DELIM); int colNameLength = ALIGNMENT > colName.length() ? ALIGNMENT : colName.length(); int colTypeLength = ALIGNMENT > colType.length() ? ALIGNMENT : colType.length(); - for (int i = 1; i < commentSegments.length; i++) { - tableInfo.append(String.format("%" + colNameLength + "s" + FIELD_DELIM + "%" - + colTypeLength + "s" + FIELD_DELIM + "%s", "", "", commentSegments[i])).append(LINE_DELIM); + indentMultilineValue(colComment, tableInfo, new int[]{colNameLength, colTypeLength}); + } + + /** + * Indent processing for multi-line values. + * Values should be indented the same amount on each line: + * if the first line of the value starts indented by k, + * the following lines should also be indented by k. + * @param value the value to write + * @param tableInfo the buffer to write to + * @param columnWidths the widths of the previous columns + */ + private static void indentMultilineValue(String value, StringBuilder tableInfo, + int[] columnWidths) { + if (value == null) { + tableInfo.append(String.format("%-" + ALIGNMENT + "s", value)).append(LINE_DELIM); + } else { + String[] valueSegments = value.split("\r\n|\r|\n"); + tableInfo.append(String.format("%-" + ALIGNMENT + "s", valueSegments[0])).append(LINE_DELIM); + for (int i = 1; i < valueSegments.length; i++) { + printPadding(tableInfo, columnWidths); + tableInfo.append(String.format("%-" + ALIGNMENT + "s", valueSegments[i])) + .append(LINE_DELIM); + } + } + } + + /** + * Print the right padding, with the given column widths + * @param tableInfo The buffer to write to + * @param columnWidths The column widths + */ + private static void printPadding(StringBuilder tableInfo, int[] columnWidths) { for (int columnWidth : columnWidths) { + if (columnWidth == 0) { + tableInfo.append(FIELD_DELIM); + } else { + tableInfo.append(String.format("%" + columnWidth + "s" + FIELD_DELIM, "")); + } } } @@ -611,6 +695,7 @@ public static String getIndexColumnsHeader() { formatOutput(ShowIndexesDesc.getSchema().split("#")[0].split(","), indexCols); return indexCols.toString(); } + public static MetaDataFormatter getFormatter(HiveConf conf) { if ("json".equals(conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text"))) { return new JsonMetaDataFormatter(); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java index 47d67b1..b990bda 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Set; +import org.apache.hive.common.util.HiveStringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; @@ -139,7 +140,7 @@
public void describeTable(DataOutputStream outStream, String colPath, if (part != null) { output = MetaDataFormatUtils.getPartitionInformation(part); } else { - output = MetaDataFormatUtils.getTableInformation(tbl); + output = MetaDataFormatUtils.getTableInformation(tbl, isOutputPadded); } outStream.write(output.getBytes("UTF-8")); @@ -460,7 +461,7 @@ public void showDatabaseDescription(DataOutputStream outStream, String database, outStream.write(database.getBytes("UTF-8")); outStream.write(separator); if (comment != null) { - outStream.write(comment.getBytes("UTF-8")); + outStream.write(HiveStringUtils.escapeJava(comment).getBytes("UTF-8")); } outStream.write(separator); if (location != null) { diff --git ql/src/test/queries/clientpositive/escape_comments.q ql/src/test/queries/clientpositive/escape_comments.q new file mode 100644 index 0000000..8c38690 --- /dev/null +++ ql/src/test/queries/clientpositive/escape_comments.q @@ -0,0 +1,20 @@ +create database escape_comments_db comment 'a\nb'; +use escape_comments_db; +create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' +partitioned by (p1 string comment 'a\nb'); +create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +as select col1 from escape_comments_tbl1; +create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'; + +describe database extended escape_comments_db; +describe database escape_comments_db; +show create table escape_comments_tbl1; +describe formatted escape_comments_tbl1; +describe pretty escape_comments_tbl1; +describe escape_comments_tbl1; +show create table escape_comments_view1; +describe formatted escape_comments_view1; +show formatted index on escape_comments_tbl1; + +drop database escape_comments_db cascade; diff --git ql/src/test/results/clientpositive/alter_view_as_select.q.out ql/src/test/results/clientpositive/alter_view_as_select.q.out index 2d82395..dc1814e 100644 --- ql/src/test/results/clientpositive/alter_view_as_select.q.out +++ ql/src/test/results/clientpositive/alter_view_as_select.q.out @@ -134,14 +134,14 @@ Bucket Columns: [] Sort Columns: [] # View Information -View Original Text: SELECT * FROM src -WHERE key > 80 AND key < 100 -ORDER BY key, value -LIMIT 10 +View Original Text: SELECT * FROM src + WHERE key > 80 AND key < 100 + ORDER BY key, value + LIMIT 10 View Expanded Text: SELECT `src`.`key`, `src`.`value` FROM `default`.`src` -WHERE `src`.`key` > 80 AND `src`.`key` < 100 -ORDER BY `src`.`key`, `src`.`value` -LIMIT 10 + WHERE `src`.`key` > 80 AND `src`.`key` < 100 + ORDER BY `src`.`key`, `src`.`value` + LIMIT 10 PREHOOK: query: DROP VIEW tv.testView PREHOOK: type: DROPVIEW PREHOOK: Input: tv@testview diff --git ql/src/test/results/clientpositive/create_like.q.out ql/src/test/results/clientpositive/create_like.q.out index 0111c94..58d9879 100644 --- ql/src/test/results/clientpositive/create_like.q.out +++ ql/src/test/results/clientpositive/create_like.q.out @@ -354,7 +354,28 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - avro.schema.literal {\n \"namespace\": \"testing.hive.avro.serde\",\n \"name\": \"doctors\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\":\"number\",\n \"type\":\"int\",\n \"doc\":\"Order of playing the role\"\n },\n {\n \"name\":\"first_name\",\n \"type\":\"string\",\n \"doc\":\"first name of actor playing role\"\n },\n {\n \"name\":\"last_name\",\n \"type\":\"string\",\n \"doc\":\"last name of actor playing role\"\n }\n ]\n} + 
avro.schema.literal { + \"namespace\": \"testing.hive.avro.serde\", + \"name\": \"doctors\", + \"type\": \"record\", + \"fields\": [ + { + \"name\":\"number\", + \"type\":\"int\", + \"doc\":\"Order of playing the role\" + }, + { + \"name\":\"first_name\", + \"type\":\"string\", + \"doc\":\"first name of actor playing role\" + }, + { + \"name\":\"last_name\", + \"type\":\"string\", + \"doc\":\"last name of actor playing role\" + } + ] + } k1 v1 k2 v2 #### A masked pattern was here #### @@ -402,7 +423,28 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} - avro.schema.literal {\n \"namespace\": \"testing.hive.avro.serde\",\n \"name\": \"doctors\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\":\"number\",\n \"type\":\"int\",\n \"doc\":\"Order of playing the role\"\n },\n {\n \"name\":\"first_name\",\n \"type\":\"string\",\n \"doc\":\"first name of actor playing role\"\n },\n {\n \"name\":\"last_name\",\n \"type\":\"string\",\n \"doc\":\"last name of actor playing role\"\n }\n ]\n} + avro.schema.literal { + \"namespace\": \"testing.hive.avro.serde\", + \"name\": \"doctors\", + \"type\": \"record\", + \"fields\": [ + { + \"name\":\"number\", + \"type\":\"int\", + \"doc\":\"Order of playing the role\" + }, + { + \"name\":\"first_name\", + \"type\":\"string\", + \"doc\":\"first name of actor playing role\" + }, + { + \"name\":\"last_name\", + \"type\":\"string\", + \"doc\":\"last name of actor playing role\" + } + ] + } numFiles 0 numRows 0 rawDataSize 0 diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out index d9c1e11..dba2342 100644 --- ql/src/test/results/clientpositive/create_view.q.out +++ ql/src/test/results/clientpositive/create_view.q.out @@ -773,9 +773,9 @@ Sort Columns: [] # View Information View Original Text: SELECT test_translate('abc', 'a', 'b') -FROM table1 + FROM table1 View Expanded Text: SELECT `_c0` AS `c` FROM (SELECT `test_translate`('abc', 'a', 'b') -FROM `default`.`table1`) `default.view8` + FROM `default`.`table1`) `default.view8` PREHOOK: query: SELECT * FROM view8 PREHOOK: type: QUERY PREHOOK: Input: default@table1 @@ -853,9 +853,9 @@ Sort Columns: [] # View Information View Original Text: SELECT test_max(length(value)) -FROM src + FROM src View Expanded Text: SELECT `_c0` AS `m` FROM (SELECT `test_max`(length(`src`.`value`)) -FROM `default`.`src`) `default.view9` + FROM `default`.`src`) `default.view9` PREHOOK: query: SELECT * FROM view9 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -931,9 +931,9 @@ Sort Columns: [] # View Information View Original Text: SELECT test_max(length(value)) -FROM src + FROM src View Expanded Text: SELECT `_c0` AS `m` FROM (SELECT `test_max`(length(`src`.`value`)) -FROM `default`.`src`) `default.view9` + FROM `default`.`src`) `default.view9` PREHOOK: query: SELECT * FROM view9 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -1075,9 +1075,9 @@ Sort Columns: [] # View Information View Original Text: SELECT test_explode(array(1,2,3)) AS (boom) -FROM table1 + FROM table1 View Expanded Text: SELECT `test_explode`(array(1,2,3)) AS (`boom`) -FROM `default`.`table1` + FROM `default`.`table1` PREHOOK: query: SELECT * FROM view11 PREHOOK: type: QUERY PREHOOK: Input: default@table1 @@ -1231,10 +1231,10 @@ Bucket Columns: [] Sort Columns: [] # View Information -View Original Text: SELECT s.key -FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s -View Expanded Text: SELECT `s`.`key` -FROM `default`.`srcbucket` 
TABLESAMPLE (BUCKET 1 OUT OF 5 ON `key`) `s` +View Original Text: SELECT s.key + FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s +View Expanded Text: SELECT `s`.`key` + FROM `default`.`srcbucket` TABLESAMPLE (BUCKET 1 OUT OF 5 ON `key`) `s` PREHOOK: query: SELECT * FROM view13 ORDER BY key LIMIT 12 PREHOOK: type: QUERY @@ -1353,25 +1353,25 @@ Sort Columns: [] # View Information View Original Text: SELECT unionsrc1.key as k1, unionsrc1.value as v1, - unionsrc2.key as k2, unionsrc2.value as v2 -FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1 - UNION ALL - select s2.key as key, s2.value as value from src s2 where s2.key < 10) unionsrc1 -JOIN - (select 'tst1' as key, cast(count(1) as string) as value from src s3 - UNION ALL - select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2 -ON (unionsrc1.key = unionsrc2.key) + unionsrc2.key as k2, unionsrc2.value as v2 + FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1 + UNION ALL + select s2.key as key, s2.value as value from src s2 where s2.key < 10) unionsrc1 + JOIN + (select 'tst1' as key, cast(count(1) as string) as value from src s3 + UNION ALL + select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2 + ON (unionsrc1.key = unionsrc2.key) View Expanded Text: SELECT `unionsrc1`.`key` as `k1`, `unionsrc1`.`value` as `v1`, - `unionsrc2`.`key` as `k2`, `unionsrc2`.`value` as `v2` -FROM (select 'tst1' as `key`, cast(count(1) as string) as `value` from `default`.`src` `s1` - UNION ALL - select `s2`.`key` as `key`, `s2`.`value` as `value` from `default`.`src` `s2` where `s2`.`key` < 10) `unionsrc1` -JOIN - (select 'tst1' as `key`, cast(count(1) as string) as `value` from `default`.`src` `s3` - UNION ALL - select `s4`.`key` as `key`, `s4`.`value` as `value` from `default`.`src` `s4` where `s4`.`key` < 10) `unionsrc2` -ON (`unionsrc1`.`key` = `unionsrc2`.`key`) + `unionsrc2`.`key` as `k2`, `unionsrc2`.`value` as `v2` + FROM (select 'tst1' as `key`, cast(count(1) as string) as `value` from `default`.`src` `s1` + UNION ALL + select `s2`.`key` as `key`, `s2`.`value` as `value` from `default`.`src` `s2` where `s2`.`key` < 10) `unionsrc1` + JOIN + (select 'tst1' as `key`, cast(count(1) as string) as `value` from `default`.`src` `s3` + UNION ALL + select `s4`.`key` as `key`, `s4`.`value` as `value` from `default`.`src` `s4` where `s4`.`key` < 10) `unionsrc2` + ON (`unionsrc1`.`key` = `unionsrc2`.`key`) PREHOOK: query: SELECT * FROM view14 ORDER BY k1 PREHOOK: type: QUERY @@ -1469,11 +1469,11 @@ Sort Columns: [] # View Information View Original Text: SELECT key,COUNT(value) AS value_count -FROM src -GROUP BY key + FROM src + GROUP BY key View Expanded Text: SELECT `src`.`key`,COUNT(`src`.`value`) AS `value_count` -FROM `default`.`src` -GROUP BY `src`.`key` + FROM `default`.`src` + GROUP BY `src`.`key` PREHOOK: query: SELECT * FROM view15 ORDER BY value_count DESC, key LIMIT 10 @@ -1554,9 +1554,9 @@ Sort Columns: [] # View Information View Original Text: SELECT DISTINCT value -FROM src + FROM src View Expanded Text: SELECT DISTINCT `src`.`value` -FROM `default`.`src` + FROM `default`.`src` PREHOOK: query: SELECT * FROM view16 ORDER BY value LIMIT 10 diff --git ql/src/test/results/clientpositive/create_view_partitioned.q.out ql/src/test/results/clientpositive/create_view_partitioned.q.out index 15d777a..4373303 100644 --- ql/src/test/results/clientpositive/create_view_partitioned.q.out +++ ql/src/test/results/clientpositive/create_view_partitioned.q.out @@ 
-86,12 +86,12 @@ Bucket Columns: [] Sort Columns: [] # View Information -View Original Text: SELECT key, value -FROM src -WHERE key=86 +View Original Text: SELECT key, value + FROM src + WHERE key=86 View Expanded Text: SELECT `src`.`key`, `src`.`value` -FROM `default`.`src` -WHERE `src`.`key`=86 + FROM `default`.`src` + WHERE `src`.`key`=86 PREHOOK: query: SELECT * FROM vp1 PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -406,12 +406,12 @@ Bucket Columns: [] Sort Columns: [] # View Information -View Original Text: SELECT key, value -FROM src -WHERE key=86 +View Original Text: SELECT key, value + FROM src + WHERE key=86 View Expanded Text: SELECT `key` AS `k`, `value` AS `v` FROM (SELECT `src`.`key`, `src`.`value` -FROM `default`.`src` -WHERE `src`.`key`=86) `default.vp3` + FROM `default`.`src` + WHERE `src`.`key`=86) `default.vp3` PREHOOK: query: ALTER VIEW vp3 ADD PARTITION (v='val_86') PREHOOK: type: ALTERTABLE_ADDPARTS diff --git ql/src/test/results/clientpositive/create_view_translate.q.out ql/src/test/results/clientpositive/create_view_translate.q.out index 2789f8f..43b9062 100644 --- ql/src/test/results/clientpositive/create_view_translate.q.out +++ ql/src/test/results/clientpositive/create_view_translate.q.out @@ -90,11 +90,11 @@ Sort Columns: [] # View Information View Original Text: select key, value from ( - select key, value from src -) a + select key, value from src + ) a View Expanded Text: select `a`.`key`, `a`.`value` from ( - select `src`.`key`, `src`.`value` from `default`.`src` -) `a` + select `src`.`key`, `src`.`value` from `default`.`src` + ) `a` PREHOOK: query: drop view v PREHOOK: type: DROPVIEW PREHOOK: Input: default@v diff --git ql/src/test/results/clientpositive/describe_comment_indent.q.out ql/src/test/results/clientpositive/describe_comment_indent.q.out index 5a01de1..5b41fb8 100644 --- ql/src/test/results/clientpositive/describe_comment_indent.q.out +++ ql/src/test/results/clientpositive/describe_comment_indent.q.out @@ -34,10 +34,10 @@ POSTHOOK: type: DESCTABLE POSTHOOK: Input: default@test_table col1 int col1 one line comment col2 string col2 - two lines comment + two lines comment col3 string col3 - three lines - comment + three lines + comment PREHOOK: query: DESCRIBE FORMATTED test_table PREHOOK: type: DESCTABLE PREHOOK: Input: default@test_table @@ -48,10 +48,10 @@ POSTHOOK: Input: default@test_table col1 int col1 one line comment col2 string col2 - two lines comment + two lines comment col3 string col3 - three lines - comment + three lines + comment # Detailed Table Information Database: default @@ -61,7 +61,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} - comment table comment\ntwo lines + comment table comment + two lines numFiles 0 numRows 0 rawDataSize 0 diff --git ql/src/test/results/clientpositive/escape_comments.q.out ql/src/test/results/clientpositive/escape_comments.q.out new file mode 100644 index 0000000..0b8c5c5 --- /dev/null +++ ql/src/test/results/clientpositive/escape_comments.q.out @@ -0,0 +1,213 @@ +PREHOOK: query: create database escape_comments_db comment 'a\nb' +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:escape_comments_db +POSTHOOK: query: create database escape_comments_db comment 'a\nb' +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:escape_comments_db +PREHOOK: query: use escape_comments_db +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:escape_comments_db +POSTHOOK: query: use escape_comments_db +POSTHOOK: type: SWITCHDATABASE 
+POSTHOOK: Input: database:escape_comments_db +PREHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\';') comment 'a\nb' +partitioned by (p1 string comment 'a\nb') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:escape_comments_db +PREHOOK: Output: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\';') comment 'a\nb' +partitioned by (p1 string comment 'a\nb') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:escape_comments_db +POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 +PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +as select col1 from escape_comments_tbl1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +PREHOOK: Output: database:escape_comments_db +PREHOOK: Output: escape_comments_db@escape_comments_view1 +POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +as select col1 from escape_comments_tbl1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: Output: database:escape_comments_db +POSTHOOK: Output: escape_comments_db@escape_comments_view1 +PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' +PREHOOK: type: CREATEINDEX +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' +POSTHOOK: type: CREATEINDEX +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ +PREHOOK: query: describe database extended escape_comments_db +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:escape_comments_db +POSTHOOK: query: describe database extended escape_comments_db +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:escape_comments_db +escape_comments_db a\nb location/in/test hive_test_user USER +PREHOOK: query: describe database escape_comments_db +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:escape_comments_db +POSTHOOK: query: describe database escape_comments_db +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:escape_comments_db +escape_comments_db a\nb location/in/test hive_test_user USER +PREHOOK: query: show create table escape_comments_tbl1 +PREHOOK: type: SHOW_CREATETABLE +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: show create table escape_comments_tbl1 +POSTHOOK: type: SHOW_CREATETABLE +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +CREATE TABLE `escape_comments_tbl1`( + `col1` string COMMENT 'a\nb\'\;') +COMMENT 'a\nb' +PARTITIONED BY ( + `p1` string COMMENT 'a\nb') +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### +PREHOOK: query: describe formatted escape_comments_tbl1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: describe formatted escape_comments_tbl1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +# col_name data_type comment + +col1 string a + b'; + +# Partition Information +# col_name data_type comment + 
+p1 string a + b + +# Detailed Table Information +Database: escape_comments_db +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + comment a + b +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: describe pretty escape_comments_tbl1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: describe pretty escape_comments_tbl1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +col_name data_type comment + +col1 string a + b'; +p1 string a + b + +# Partition Information +col_name data_type comment + +p1 string a + b +PREHOOK: query: describe escape_comments_tbl1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: describe escape_comments_tbl1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +col1 string a + b'; +p1 string a + b + +# Partition Information +# col_name data_type comment + +p1 string a + b +PREHOOK: query: show create table escape_comments_view1 +PREHOOK: type: SHOW_CREATETABLE +PREHOOK: Input: escape_comments_db@escape_comments_view1 +POSTHOOK: query: show create table escape_comments_view1 +POSTHOOK: type: SHOW_CREATETABLE +POSTHOOK: Input: escape_comments_db@escape_comments_view1 +CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` +PREHOOK: query: describe formatted escape_comments_view1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: escape_comments_db@escape_comments_view1 +POSTHOOK: query: describe formatted escape_comments_view1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: escape_comments_db@escape_comments_view1 +# col_name data_type comment + +col1 string a + b + +# Detailed Table Information +Database: escape_comments_db +#### A masked pattern was here #### +Retention: 0 +Table Type: VIRTUAL_VIEW +Table Parameters: + comment a + b +#### A masked pattern was here #### + +# Storage Information +SerDe Library: null +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] + +# View Information +View Original Text: select col1 from escape_comments_tbl1 +View Expanded Text: SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` +PREHOOK: query: show formatted index on escape_comments_tbl1 +PREHOOK: type: SHOWINDEXES +POSTHOOK: query: show formatted index on escape_comments_tbl1 +POSTHOOK: type: SHOWINDEXES +idx_name tab_name col_names idx_tab_name idx_type comment + + +index2 escape_comments_tbl1 col1 escape_comments_db__escape_comments_tbl1_index2__ compact a + b +PREHOOK: query: drop database escape_comments_db cascade +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:escape_comments_db +PREHOOK: Output: database:escape_comments_db +PREHOOK: Output: 
escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ +PREHOOK: Output: escape_comments_db@escape_comments_tbl1 +PREHOOK: Output: escape_comments_db@escape_comments_view1 +POSTHOOK: query: drop database escape_comments_db cascade +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:escape_comments_db +POSTHOOK: Output: database:escape_comments_db +POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ +POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 +POSTHOOK: Output: escape_comments_db@escape_comments_view1
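Note for reviewers (illustration only, not part of the patch): the intent of the new HiveStringUtils.escapeHiveCommand() translator can be sanity-checked with a small standalone sketch. The translator table below is copied from the HiveStringUtils hunk above; the EscapeDemo class and its main() harness are hypothetical, and the commons-lang3 import paths are an assumption based on the LookupTranslator/EntityArrays classes the patched file already uses.

import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.text.translate.CharSequenceTranslator;
import org.apache.commons.lang3.text.translate.EntityArrays;
import org.apache.commons.lang3.text.translate.LookupTranslator;

public class EscapeDemo {
  // Same lookup table as the ESCAPE_HIVE_COMMAND constant added by the patch:
  // escape quote, semicolon and backslash, then chain the control-char escapes.
  private static final CharSequenceTranslator ESCAPE_HIVE_COMMAND =
      new LookupTranslator(
        new String[][] {
          {"'", "\\'"},
          {";", "\\;"},
          {"\\", "\\\\"},
        }).with(
        new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE()));

  public static void main(String[] args) {
    // The comment exercised by escape_comments.q: a newline, a quote and a semicolon.
    String comment = "a\nb';";
    // Control characters, quotes and semicolons are escaped, so the comment can be
    // embedded in the quoted literals emitted by SHOW CREATE TABLE:
    System.out.println(ESCAPE_HIVE_COMMAND.translate(comment)); // prints: a\nb\'\;
    // Unlike StringEscapeUtils.escapeJava(), non-ASCII text stays readable:
    System.out.println(ESCAPE_HIVE_COMMAND.translate("árvíz")); // prints: árvíz
    System.out.println(StringEscapeUtils.escapeJava("árvíz"));  // prints: \u00E1rv\u00EDz
  }
}

This matches the expected output in escape_comments.q.out above, where the column comment round-trips as 'a\nb\'\;' in SHOW CREATE TABLE, while DESCRIBE FORMATTED unescapes it back into padded multi-line output via indentMultilineValue().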