diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index 3c8fccc..8b94483 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -1755,7 +1755,7 @@ String dequote(String str) { for (int i = 0; i < obs.length - 1; i += 2) { m.put(obs[i], obs[i + 1]); } - return Collections.unmodifiableMap(m); + return m; } @@ -2385,4 +2385,14 @@ public String getCurrentDatabase() { public void setCurrentDatabase(String currentDatabase) { this.currentDatabase = currentDatabase; } + + /** + * Adds a new format to the list of possible formats. Not intended for public use. Added here + * so that the query tests can add their own format. + * @param key The name of the format + * @param format The instance that implements the given format + */ + void addOutputFormat(String key, OutputFormat format) { + formats.put(key, format); + } } diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index 99ee82c..da43e90 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -1795,7 +1795,7 @@ public boolean run(String line) { if (cmd != null) { // we're continuing an existing command - cmd.append(" \n"); + cmd.append("\n"); cmd.append(scriptLine); if (trimmedLine.endsWith(";")) { // this command has terminated diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 0c590c8..98d6b08 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -727,7 +727,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\ encryption_ctas.q beeline.positive.include=drop_with_concurrency.q,\ - escape_comments.q + escape_comments.q,\ + select_dummy_source.q minimr.query.negative.files=cluster_tasklog_retrieval.q,\ file_with_header_footer_negative.q,\ diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index acc02eb..b655699 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -22,10 +22,10 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter; -import org.apache.hive.beeline.qfile.QFile; -import org.apache.hive.beeline.qfile.QFile.QFileBuilder; -import org.apache.hive.beeline.qfile.QFileBeeLineClient; -import org.apache.hive.beeline.qfile.QFileBeeLineClient.QFileClientBuilder; +import org.apache.hive.beeline.QFile; +import org.apache.hive.beeline.QFile.QFileBuilder; +import org.apache.hive.beeline.QFileBeeLineClient; +import org.apache.hive.beeline.QFileBeeLineClient.QFileClientBuilder; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -92,12 +92,9 @@ public void beforeClass() throws Exception { .setPassword("password"); fileBuilder = new QFileBuilder() - .setHiveRootDirectory(hiveRootDirectory) .setLogDirectory(logDirectory) .setQueryDirectory(queryDirectory) - .setResultsDirectory(resultsDirectory) - .setScratchDirectoryString(hiveConf.getVar(HiveConf.ConfVars.SCRATCHDIR)) - .setWarehouseDirectoryString(hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE)); +
.setResultsDirectory(resultsDirectory); runInfraScript(initScript, new File(logDirectory, "init.beeline"), new File(logDirectory, "init.raw")); @@ -111,7 +108,7 @@ protected void runInfraScript(File script, File beeLineOutput, File log) "set hive.exec.pre.hooks=" + PreExecutePrinter.class.getName() + ";", "set test.data.dir=" + testDataDirectory + ";", "set test.script.dir=" + testScriptDirectory + ";", - "!run " + script, + "!run " + script }, log); } diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java similarity index 58% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFile.java index 49d6d24..7d90a99 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; import org.apache.commons.io.FileUtils; import org.apache.hadoop.util.Shell; @@ -43,11 +43,10 @@ private File inputFile; private File rawOutputFile; private File outputFile; - private File expcetedOutputFile; + private File expectedOutputFile; private File logFile; private File infraLogFile; - private static RegexFilterSet staticFilterSet = getStaticFilterSet(); - private RegexFilterSet specificFilterSet; + private static RegexFilterSet filterSet = getFilterSet(); private QFile() {} @@ -63,14 +62,6 @@ public File getRawOutputFile() { return rawOutputFile; } - public File getOutputFile() { - return outputFile; - } - - public File getExpectedOutputFile() { - return expcetedOutputFile; - } - public File getLogFile() { return logFile; } @@ -81,23 +72,30 @@ public File getInfraLogFile() { public void filterOutput() throws IOException { String rawOutput = FileUtils.readFileToString(rawOutputFile); - String filteredOutput = staticFilterSet.filter(specificFilterSet.filter(rawOutput)); - FileUtils.writeStringToFile(outputFile, filteredOutput); + String[] splitRawOutput = rawOutput.split(String.valueOf(QFileOutputFormat.BOUNDING_CHAR)); + StringBuffer filteredBuffer = new StringBuffer(); + for(int pos=0; pos 0) { + try { + writer.write(vals); + } catch (IOException e) { + beeLine.error(e); + } finally { + IOUtils.closeStream(writer); + } + } + return strWriter.toString(); + } + + private void printRow(Rows.Row row) { + String[] vals = row.values; + String formattedStr = getFormattedStr(vals); + beeLine.output(BOUNDING_CHAR + formattedStr + BOUNDING_CHAR); + } +} diff --git itests/util/src/main/java/org/apache/hive/beeline/QFilePostExecutePrinter.java itests/util/src/main/java/org/apache/hive/beeline/QFilePostExecutePrinter.java new file mode 100644 index 0000000..553e41e --- /dev/null +++ itests/util/src/main/java/org/apache/hive/beeline/QFilePostExecutePrinter.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.beeline; + +import org.apache.hadoop.hive.ql.hooks.PostExecutePrinter; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Extends PostExecutePrinter so that the output is bounded with '\0' chars. + */ +public class QFilePostExecutePrinter extends PostExecutePrinter { + @Override + protected void printMessage(SessionState.LogHelper console, String message) { + console.printError(QFileOutputFormat.BOUNDING_CHAR + message + QFileOutputFormat.BOUNDING_CHAR); + } +} diff --git itests/util/src/main/java/org/apache/hive/beeline/QFilePreExecutePrinter.java itests/util/src/main/java/org/apache/hive/beeline/QFilePreExecutePrinter.java new file mode 100644 index 0000000..292d52d --- /dev/null +++ itests/util/src/main/java/org/apache/hive/beeline/QFilePreExecutePrinter.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.beeline; + +import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Extends PreExecutePrinter so that the output is bounded with '\0' chars. + */ +public class QFilePreExecutePrinter extends PreExecutePrinter { + + @Override + protected void printMessage(SessionState.LogHelper console, String message) { + console.printError(QFileOutputFormat.BOUNDING_CHAR + message + QFileOutputFormat.BOUNDING_CHAR); + } +} diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java itests/util/src/main/java/org/apache/hive/beeline/package-info.java similarity index 89% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java rename to itests/util/src/main/java/org/apache/hive/beeline/package-info.java index fcd50ec..75b9fb4 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java +++ itests/util/src/main/java/org/apache/hive/beeline/package-info.java @@ -17,6 +17,6 @@ */ /** - * Package for the BeeLine specific QTest file classes. + * Package for the BeeLine-specific file classes.
 */ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java index b4fc125..86e854f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java @@ -40,7 +40,7 @@ * Implementation of a post execute hook that simply prints out its parameters * to standard output. */ -public class PostExecutePrinter implements ExecuteWithHookContext { +public class PostExecutePrinter extends PrinterHook implements ExecuteWithHookContext { public class DependencyKeyComp implements Comparator<Map.Entry<DependencyKey, Dependency>> { @@ -116,12 +116,12 @@ public void run(QueryState queryState, Set<ReadEntity> inputs, } if (queryState != null) { - console.printError("POSTHOOK: query: " + queryState.getQueryString().trim()); - console.printError("POSTHOOK: type: " + queryState.getCommandType()); + printMessage(console, "POSTHOOK: query: " + queryState.getQueryString().trim()); + printMessage(console, "POSTHOOK: type: " + queryState.getCommandType()); } - PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: "); - PreExecutePrinter.printEntities(console, outputs, "POSTHOOK: Output: "); + printEntities(console, inputs, "POSTHOOK: Input: "); + printEntities(console, outputs, "POSTHOOK: Output: "); // Also print out the generic lineage information if there is any if (linfo != null) { @@ -167,7 +167,7 @@ public void run(QueryState queryState, Set<ReadEntity> inputs, } sb.append("]"); - console.printError(sb.toString()); + printMessage(console, sb.toString()); } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java index 232c62d..6a6bd3b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.hooks; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; import java.util.Set; import org.apache.hadoop.hive.common.io.FetchConverter; @@ -35,7 +32,7 @@ * Implementation of a pre execute hook that simply prints out its parameters to * standard output.
 */ -public class PreExecutePrinter implements ExecuteWithHookContext { +public class PreExecutePrinter extends PrinterHook implements ExecuteWithHookContext { @Override public void run(HookContext hookContext) throws Exception { @@ -65,22 +62,12 @@ public void run(QueryState queryState, Set<ReadEntity> inputs, } if (queryState != null) { - console.printError("PREHOOK: query: " + queryState.getQueryString().trim()); - console.printError("PREHOOK: type: " + queryState.getCommandType()); + printMessage(console, "PREHOOK: query: " + queryState.getQueryString().trim()); + printMessage(console, "PREHOOK: type: " + queryState.getCommandType()); } printEntities(console, inputs, "PREHOOK: Input: "); printEntities(console, outputs, "PREHOOK: Output: "); } - static void printEntities(LogHelper console, Set<?> entities, String prefix) { - List<String> strings = new ArrayList<String>(); - for (Object o : entities) { - strings.add(o.toString()); - } - Collections.sort(strings); - for (String s : strings) { - console.printError(prefix + s); - } - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/PrinterHook.java ql/src/java/org/apache/hadoop/hive/ql/hooks/PrinterHook.java new file mode 100644 index 0000000..252d526 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PrinterHook.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.hooks; + +import org.apache.hadoop.hive.ql.session.SessionState; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +/** + * Hook that prints messages to the console.
+ */ +public class PrinterHook { + protected void printEntities(SessionState.LogHelper console, Set<?> entities, String prefix) { + List<String> strings = new ArrayList<String>(); + for (Object o : entities) { + strings.add(o.toString()); + } + Collections.sort(strings); + for (String s : strings) { + printMessage(console, prefix + s); + } + } + + protected void printMessage(SessionState.LogHelper console, String message) { + console.printError(message); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 0732207..e390580 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -11125,6 +11125,7 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce .omitEmptyStrings() .split(Strings.nullToEmpty(HiveConf.getVar(conf, HiveConf.ConfVars.POSTEXECHOOKS)))); if (postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.PostExecutePrinter") + || postExecHooks.contains("org.apache.hive.beeline.QFilePostExecutePrinter") || postExecHooks.contains("org.apache.hadoop.hive.ql.hooks.LineageLogger") || postExecHooks.contains("org.apache.atlas.hive.hook.HiveHook")) { ArrayList<Transform> transformations = new ArrayList<Transform>(); diff --git ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out index d22c9ec..1342674 100644 --- ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out +++ ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out @@ -1,67 +1,20 @@ ->>> !run !!{qFileDirectory}!!/drop_with_concurrency.q ->>> set hive.lock.numretries=1; -No rows affected ->>> set hive.lock.sleep.between.retries=1; -No rows affected ->>> set hive.support.concurrency=true; -No rows affected ->>> set hive.lock.manager=org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager; -No rows affected ->>> ->>> drop table if exists drop_with_concurrency_1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 PREHOOK: query: drop table if exists drop_with_concurrency_1 PREHOOK: type: DROPTABLE -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop table if exists drop_with_concurrency_1 POSTHOOK: type: DROPTABLE -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query drop table if exists drop_with_concurrency_1 -No rows affected ->>> create table drop_with_concurrency_1 (c1 int); -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!!
seconds -Executing commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) PREHOOK: query: create table drop_with_concurrency_1 (c1 int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:drop_with_concurrency PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create table drop_with_concurrency_1 (c1 int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:drop_with_concurrency POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create table drop_with_concurrency_1 (c1 int) -No rows affected ->>> drop table drop_with_concurrency_1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 PREHOOK: query: drop table drop_with_concurrency_1 PREHOOK: type: DROPTABLE PREHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop table drop_with_concurrency_1 POSTHOOK: type: DROPTABLE POSTHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query drop table drop_with_concurrency_1 -No rows affected ->>> !record diff --git ql/src/test/results/clientpositive/beeline/escape_comments.q.out ql/src/test/results/clientpositive/beeline/escape_comments.q.out index 5f9df93..1ab6fe4 100644 --- ql/src/test/results/clientpositive/beeline/escape_comments.q.out +++ ql/src/test/results/clientpositive/beeline/escape_comments.q.out @@ -1,90 +1,34 @@ ->>> !run !!{qFileDirectory}!!/escape_comments.q ->>> create database escape_comments_db comment 'a\nb'; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' PREHOOK: query: create database escape_comments_db comment 'a\nb' PREHOOK: type: CREATEDATABASE PREHOOK: Output: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create database escape_comments_db comment 'a\nb' POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create database escape_comments_db comment 'a\nb' -No rows affected ->>> use escape_comments_db; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): use escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): use escape_comments_db PREHOOK: query: use escape_comments_db PREHOOK: type: SWITCHDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: use escape_comments_db POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query use escape_comments_db -No rows affected ->>> create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb'); -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -PREHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +PREHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') PREHOOK: type: CREATETABLE PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode -POSTHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +POSTHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -No rows affected ->>> create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col1, type:string, comment:null)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 PREHOOK: type: CREATEVIEW PREHOOK: Input: escape_comments_db@escape_comments_tbl1 PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_view1 -Starting task [Stage-1:DDL] in serial mode -POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 @@ -92,313 +36,170 @@ POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_view1 POSTHOOK: Lineage: escape_comments_view1.col1 SIMPLE [(escape_comments_tbl1)escape_comments_tbl1.FieldSchema(name:col1, type:string, comment:a b';), ] -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -No rows affected ->>> create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' PREHOOK: type: CREATEINDEX PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' POSTHOOK: type: CREATEINDEX POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -No rows affected ->>> ->>> describe database extended escape_comments_db; -Acquired the compile lock. 
-Compiling commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db PREHOOK: query: describe database extended escape_comments_db PREHOOK: type: DESCDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe database extended escape_comments_db POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe database extended escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> describe database escape_comments_db; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe database escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe database escape_comments_db +escape_comments_db a\nb location/in/test user USER PREHOOK: query: describe database escape_comments_db PREHOOK: type: DESCDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe database escape_comments_db POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe database escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> show create table escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 +escape_comments_db a\nb location/in/test user USER PREHOOK: query: show create table escape_comments_tbl1 PREHOOK: type: SHOW_CREATETABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show create table escape_comments_tbl1 POSTHOOK: type: SHOW_CREATETABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show create table escape_comments_tbl1 -'createtab_stmt' -'CREATE TABLE `escape_comments_tbl1`(' -' `col1` string COMMENT 'a\nb\'\;')' -'COMMENT 'a\nb'' -'PARTITIONED BY ( ' -' `p1` string COMMENT 'a\nb')' -'ROW FORMAT SERDE ' -' 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' ' -'STORED AS INPUTFORMAT ' -' 'org.apache.hadoop.mapred.TextInputFormat' ' -'OUTPUTFORMAT ' -' 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'' -'LOCATION' -' '!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1'' -'TBLPROPERTIES (' -' 'transient_lastDdlTime'='!!UNIXTIME!!')' -15 rows selected ->>> describe formatted escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 +CREATE TABLE `escape_comments_tbl1`( + `col1` string COMMENT 'a\nb\'\;') +COMMENT 'a\nb' +PARTITIONED BY ( + `p1` string COMMENT 'a\nb') +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### PREHOOK: query: describe formatted escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe formatted escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -OK -Shutting down query describe formatted escape_comments_tbl1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb';' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Location: ','!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1','NULL' -'Table Type: ','MANAGED_TABLE ','NULL' -'Table Parameters:','NULL','NULL' -'','COLUMN_STATS_ACCURATE','{\"BASIC_STATS\":\"true\"}' -'','comment ','a\nb ' -'','numFiles ','0 ' -'','numRows ','0 ' -'','rawDataSize ','0 ' -'','totalSize ','0 ' -'','transient_lastDdlTime','!!UNIXTIME!! ' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'Storage Desc Params:','NULL','NULL' -'','serialization.format','1 ' -36 rows selected ->>> describe pretty escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 +# col_name data_type comment + NULL NULL +col1 string a\nb'; + NULL NULL +# Partition Information NULL NULL +# col_name data_type comment + NULL NULL +p1 string a\nb + NULL NULL +# Detailed Table Information NULL NULL +Database: escape_comments_db NULL +Owner: user NULL +#### A masked pattern was here #### +LastAccessTime: UNKNOWN NULL +Retention: 0 NULL +#### A masked pattern was here #### +Table Type: MANAGED_TABLE NULL +Table Parameters: NULL NULL + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + comment a\nb + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 +#### A masked pattern was here #### + NULL NULL +# Storage Information NULL NULL +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe NULL +InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL +Compressed: No NULL +Num Buckets: -1 NULL +Bucket Columns: [] NULL +Sort Columns: [] NULL +Storage Desc Params: NULL NULL + serialization.format 1 PREHOOK: query: describe pretty escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe pretty escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -OK -Shutting down query describe pretty escape_comments_tbl1 -'col_name','data_type','comment' -'col_name ','data_type ','comment' -'','','' -'col1 ','string ','a' -' ',' ','b';' -'p1 ','string ','a' -' ',' ','b' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'col_name ','data_type ','comment' -'','','' -'p1 ','string ','a' -' ',' ','b' -12 rows selected ->>> describe escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 +col_name data_type comment + +col1 string a + b'; +p1 string a + b + NULL NULL +# Partition Information NULL NULL +col_name data_type comment + +p1 string a + b PREHOOK: query: describe escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe escape_comments_tbl1 -'col_name','data_type','comment' -'col1','string','a\nb';' -'p1','string','a\nb' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -7 rows selected ->>> show create table escape_comments_view1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 +col1 string a\nb'; +p1 string a\nb + NULL NULL +# Partition Information NULL NULL +# col_name data_type comment + NULL NULL +p1 string a\nb PREHOOK: query: show create table escape_comments_view1 PREHOOK: type: SHOW_CREATETABLE PREHOOK: Input: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show create table escape_comments_view1 POSTHOOK: type: SHOW_CREATETABLE POSTHOOK: Input: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show create table escape_comments_view1 -'createtab_stmt' -'CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`' -1 row selected ->>> describe formatted escape_comments_view1; -Acquired the compile lock. 
-Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 +CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` PREHOOK: query: describe formatted escape_comments_view1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe formatted escape_comments_view1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe formatted escape_comments_view1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Table Type: ','VIRTUAL_VIEW ','NULL' -'Table Parameters:','NULL','NULL' -'','comment ','a\nb ' -'','transient_lastDdlTime','!!UNIXTIME!! ' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','null ','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'','NULL','NULL' -'# View Information','NULL','NULL' -'View Original Text: ','select col1 from escape_comments_tbl1','NULL' -'View Expanded Text: ','SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`','NULL' -'View Rewrite Enabled:','No ','NULL' -28 rows selected ->>> show formatted index on escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:idx_name, type:string, comment:from deserializer), FieldSchema(name:tab_name, type:string, comment:from deserializer), FieldSchema(name:col_names, type:string, comment:from deserializer), FieldSchema(name:idx_tab_name, type:string, comment:from deserializer), FieldSchema(name:idx_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 +# col_name data_type comment + NULL NULL +col1 string a\nb + NULL NULL +# Detailed Table Information NULL NULL +Database: escape_comments_db NULL +Owner: user NULL +#### A masked pattern was here #### +LastAccessTime: UNKNOWN NULL +Retention: 0 NULL +Table Type: VIRTUAL_VIEW NULL +Table Parameters: NULL NULL + comment a\nb +#### A masked pattern was here #### + NULL NULL +# Storage Information NULL NULL +SerDe Library: null NULL +InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL +Compressed: No NULL +Num Buckets: -1 NULL +Bucket Columns: [] NULL +Sort Columns: [] NULL + NULL NULL +# View Information NULL NULL +View Original Text: select col1 from escape_comments_tbl1 NULL +View Expanded Text: SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` NULL +View Rewrite Enabled: No NULL PREHOOK: query: show formatted index on escape_comments_tbl1 PREHOOK: type: SHOWINDEXES -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show formatted index on escape_comments_tbl1 POSTHOOK: type: SHOWINDEXES -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show formatted index on escape_comments_tbl1 -'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment' -'idx_name ','tab_name ','col_names ','idx_tab_name ','idx_type ','comment ' -'','NULL','NULL','NULL','NULL','NULL' -'','NULL','NULL','NULL','NULL','NULL' -'index2 ','escape_comments_tbl1','col1 ','escape_comments_db__escape_comments_tbl1_index2__','compact ','a\nb ' -4 rows selected ->>> ->>> drop database escape_comments_db cascade; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade +idx_name tab_name col_names idx_tab_name idx_type comment + NULL NULL NULL NULL NULL + NULL NULL NULL NULL NULL +index2 escape_comments_tbl1 col1 escape_comments_db__escape_comments_tbl1_index2__ compact a\nb PREHOOK: query: drop database escape_comments_db cascade PREHOOK: type: DROPDATABASE PREHOOK: Input: database:escape_comments_db @@ -406,7 +207,6 @@ PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ PREHOOK: Output: escape_comments_db@escape_comments_tbl1 PREHOOK: Output: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop database escape_comments_db cascade POSTHOOK: type: DROPDATABASE POSTHOOK: Input: database:escape_comments_db @@ -414,8 +214,3 @@ POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 POSTHOOK: Output: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -OK -Shutting down query drop database escape_comments_db cascade -No rows affected ->>> !record diff --git ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out new file mode 100644 index 0000000..b6fa03f --- /dev/null +++ ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out @@ -0,0 +1,251 @@ +PREHOOK: query: explain +select 'a', 100 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 'a', 100 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 'a' (type: string), 100 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 'a', 100 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 'a', 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a 100 +PREHOOK: query: explain +select 1 + 1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 1 + 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 2 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 1 + 1 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 1 + 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2 +PREHOOK: query: explain +select explode(array('a', 'b')) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select explode(array('a', 'b')) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: array('a','b') (type: array<string>) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + UDTF Operator + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + function name: explode + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select explode(array('a', 'b')) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select explode(array('a', 'b')) +POSTHOOK: type: QUERY +POSTHOOK: Input:
_dummy_database@_dummy_table +#### A masked pattern was here #### +a +b +PREHOOK: query: explain +select 'a', 100 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 'a', 100 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 'a' (type: string), 100 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 'a', 100 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 'a', 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a 100 +PREHOOK: query: explain +select 1 + 1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 1 + 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 2 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 1 + 1 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 1 + 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2 +PREHOOK: query: explain +select explode(array('a', 'b')) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select explode(array('a', 'b')) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: array('a','b') (type: array<string>) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + UDTF Operator + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + function name: explode + ListSink + +PREHOOK: query: select explode(array('a', 'b')) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select explode(array('a', 'b')) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a +b +PREHOOK: query: explain +select 2 + 3,x from (select 1 + 2 x) X +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 2 + 3,x from (select 1 + 2 x) X +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 5 (type: int), 3 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 2 + 3,x from (select 1 + 2 x) X +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +5 3
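
How the pieces above fit together: QFilePreExecutePrinter and QFilePostExecutePrinter override printMessage() so that every PREHOOK/POSTHOOK line is wrapped in QFileOutputFormat.BOUNDING_CHAR, and QFile.filterOutput() splits the raw BeeLine output on that character so the bounded segments can be separated from the driver's console chatter before the .q.out file is written. What follows is a minimal, self-contained sketch of that split-and-keep step, not the patch's actual code: the class name and sample input are invented for illustration, the odd-index assumption only holds when every bounding pair is balanced, and the real filterOutput() additionally runs each kept segment through its RegexFilterSet before writing the result.

public class BoundingCharFilterSketch {
  // Assumption: mirrors QFileOutputFormat.BOUNDING_CHAR from the patch.
  private static final char BOUNDING_CHAR = '\0';

  // String.split() keeps leading empty segments (only trailing empties are
  // removed), so text emitted between a pair of bounding chars always lands
  // on an odd index as long as the pairs are balanced; even indices hold the
  // unbounded driver chatter.
  static String filter(String rawOutput) {
    String[] segments = rawOutput.split(String.valueOf(BOUNDING_CHAR));
    StringBuilder kept = new StringBuilder();
    for (int pos = 1; pos < segments.length; pos += 2) {
      kept.append(segments[pos]).append('\n');
    }
    return kept.toString();
  }

  public static void main(String[] args) {
    String raw = "Compiling command; Time taken: 0.1 seconds\n"
        + BOUNDING_CHAR + "PREHOOK: query: select 1" + BOUNDING_CHAR
        + "\nExecuting command; Time taken: 0.2 seconds\n"
        + BOUNDING_CHAR + "POSTHOOK: query: select 1" + BOUNDING_CHAR + "\n";
    // Prints only the two hook lines; the compile/execute chatter (the kind
    // of lines deleted from the old golden files above) is dropped.
    System.out.print(filter(raw));
  }
}

Under those assumptions, everything the driver prints outside a bounded pair never reaches the comparison, which is why the new golden files above contain only PREHOOK/POSTHOOK blocks, query results, and masked patterns.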