diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index 11526a7..cff48d6 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -148,6 +148,8 @@ // Indicates if this instance of beeline is running in compatibility mode, or beeline mode private boolean isBeeLine = true; + private boolean isTestMode = false; + private static final Options options = new Options(); public static final String BEELINE_DEFAULT_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver"; @@ -2386,4 +2388,12 @@ public String getCurrentDatabase() { public void setCurrentDatabase(String currentDatabase) { this.currentDatabase = currentDatabase; } + + void setIsTestMode(boolean isTestMode) { + this.isTestMode = isTestMode; + } + + boolean isTestMode() { + return isTestMode; + } } diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index 2578728..3b65682 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -978,7 +978,7 @@ private boolean executeInternal(String sql, boolean call) { hasResults = ((CallableStatement) stmnt).execute(); } else { stmnt = beeLine.createStatement(); - if (beeLine.getOpts().isSilent()) { + if (!beeLine.isTestMode() && beeLine.getOpts().isSilent()) { hasResults = stmnt.execute(sql); } else { InPlaceUpdateStream.EventNotifier eventNotifier = @@ -1342,7 +1342,11 @@ private void updateQueryLog() { try { List queryLogs = hiveStatement.getQueryLog(); for (String log : queryLogs) { - commands.beeLine.info(log); + if (!commands.beeLine.isTestMode()) { + commands.beeLine.info(log); + } else { + commands.beeLine.error(log); + } } if (!queryLogs.isEmpty()) { notifier.operationLogShowedToUser(); @@ -1386,7 +1390,11 @@ private void showRemainingLogsIfAny(Statement statement) { return; } for (String log : logs) { - beeLine.info(log); + if (!beeLine.isTestMode()) { + beeLine.info(log); + } else { + beeLine.error(log); + } } } while (logs.size() > 0); } else { @@ -1810,7 +1818,7 @@ public boolean run(String line) { if (cmd != null) { // we're continuing an existing command - cmd.append(" \n"); + cmd.append("\n"); cmd.append(scriptLine); if (trimmedLine.endsWith(";")) { // this command has terminated diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 3c98a57..b76233f 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -744,7 +744,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\ encryption_ctas.q beeline.positive.include=drop_with_concurrency.q,\ - escape_comments.q + escape_comments.q,\ + select_dummy_source.q minimr.query.negative.files=cluster_tasklog_retrieval.q,\ file_with_header_footer_negative.q,\ diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index 0d63f5d..5718a49 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -23,10 +23,10 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter; -import 
org.apache.hive.beeline.qfile.QFile; -import org.apache.hive.beeline.qfile.QFile.QFileBuilder; -import org.apache.hive.beeline.qfile.QFileBeeLineClient; -import org.apache.hive.beeline.qfile.QFileBeeLineClient.QFileClientBuilder; +import org.apache.hive.beeline.QFile; +import org.apache.hive.beeline.QFile.QFileBuilder; +import org.apache.hive.beeline.QFileBeeLineClient; +import org.apache.hive.beeline.QFileBeeLineClient.QFileClientBuilder; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -94,12 +94,9 @@ public void beforeClass() throws Exception { .setPassword("password"); fileBuilder = new QFileBuilder() - .setHiveRootDirectory(hiveRootDirectory) .setLogDirectory(logDirectory) .setQueryDirectory(queryDirectory) - .setResultsDirectory(resultsDirectory) - .setScratchDirectoryString(hiveConf.getVar(HiveConf.ConfVars.SCRATCHDIR)) - .setWarehouseDirectoryString(hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE)); + .setResultsDirectory(resultsDirectory); runInfraScript(initScript, new File(logDirectory, "init.beeline"), new File(logDirectory, "init.raw")); diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java similarity index 69% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFile.java index 890a167..b4ee20c 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.ql.QTestProcessExecResult; @@ -48,7 +48,7 @@ + " - Client log file: %5\n" + " - Client log files before the test: %6\n" + " - Client log files after the test: %7\n" - + " - Hiveserver2 log file: %8\n"; + + " - HiveServer2 log file: %8\n"; private String name; private File inputFile; @@ -59,7 +59,6 @@ private File beforeExecuteLogFile; private File afterExecuteLogFile; private static RegexFilterSet staticFilterSet = getStaticFilterSet(); - private RegexFilterSet specificFilterSet; private QFile() {} @@ -102,7 +101,7 @@ public String getDebugHint() { public void filterOutput() throws IOException { String rawOutput = FileUtils.readFileToString(rawOutputFile); - String filteredOutput = staticFilterSet.filter(specificFilterSet.filter(rawOutput)); + String filteredOutput = staticFilterSet.filter(rawOutput); FileUtils.writeStringToFile(outputFile, filteredOutput); } @@ -189,42 +188,16 @@ public String filter(String input) { // These are the filters which are common for every QTest. // Check specificFilterSet for QTest specific ones. private static RegexFilterSet getStaticFilterSet() { - // Extract the leading four digits from the unix time value. - // Use this as a prefix in order to increase the selectivity - // of the unix time stamp replacement regex. 
- String currentTimePrefix = Long.toString(System.currentTimeMillis()).substring(0, 4); + String maskPattern = "\n#### A masked pattern was here ####\n"; - String userName = System.getProperty("user.name"); - - String timePattern = "(Mon|Tue|Wed|Thu|Fri|Sat|Sun) " - + "(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " - + "\\d{2} \\d{2}:\\d{2}:\\d{2} \\w+ 20\\d{2}"; // Pattern to remove the timestamp and other infrastructural info from the out file - String logPattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d*\\s+\\S+\\s+\\[" + - ".*\\]\\s+\\S+:\\s+"; - String operatorPattern = "\"(CONDITION|COPY|DEPENDENCY_COLLECTION|DDL" - + "|EXPLAIN|FETCH|FIL|FS|FUNCTION|GBY|HASHTABLEDUMMY|HASTTABLESINK|JOIN" - + "|LATERALVIEWFORWARD|LIM|LVJ|MAP|MAPJOIN|MAPRED|MAPREDLOCAL|MOVE|OP|RS" - + "|SCR|SEL|STATS|TS|UDTF|UNION)_\\d+\""; - + String logPattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d*\\s+\\S+\\s+\\[" + + ".*\\]\\s+\\S+:\\s+"; return new RegexFilterSet() .addFilter(logPattern, "") - .addFilter("(?s)\nWaiting to acquire compile lock:.*?Acquired the compile lock.\n", - "\nAcquired the compile lock.\n") - .addFilter("Getting log thread is interrupted, since query is done!\n", "") - .addFilter("going to print operations logs\n", "") - .addFilter("printed operations logs\n", "") - .addFilter("\\(queryId=[^\\)]*\\)", "queryId=(!!{queryId}!!)") - .addFilter("file:/\\w\\S+", "file:/!!ELIDED!!") - .addFilter("pfile:/\\w\\S+", "pfile:/!!ELIDED!!") - .addFilter("hdfs:/\\w\\S+", "hdfs:/!!ELIDED!!") - .addFilter("last_modified_by=\\w+", "last_modified_by=!!ELIDED!!") - .addFilter(timePattern, "!!TIMESTAMP!!") - .addFilter("(\\D)" + currentTimePrefix + "\\d{6}(\\D)", "$1!!UNIXTIME!!$2") - .addFilter("(\\D)" + currentTimePrefix + "\\d{9}(\\D)", "$1!!UNIXTIMEMILLIS!!$2") - .addFilter(userName, "!!{user.name}!!") - .addFilter(operatorPattern, "\"$1_!!ELIDED!!\"") - .addFilter("Time taken: [0-9\\.]* seconds", "Time taken: !!ELIDED!! seconds"); + .addFilter("\n.*/tmp/.*\n", maskPattern) + .addFilter("\n.*CreateTime.*\n", maskPattern) + .addFilter("\n.*transient_lastDdlTime.*\n", maskPattern); } /** @@ -235,9 +208,6 @@ private static RegexFilterSet getStaticFilterSet() { private File queryDirectory; private File logDirectory; private File resultsDirectory; - private String scratchDirectoryString; - private String warehouseDirectoryString; - private File hiveRootDirectory; public QFileBuilder() { } @@ -257,21 +227,6 @@ public QFileBuilder setResultsDirectory(File resultsDirectory) { return this; } - public QFileBuilder setScratchDirectoryString(String scratchDirectoryString) { - this.scratchDirectoryString = scratchDirectoryString; - return this; - } - - public QFileBuilder setWarehouseDirectoryString(String warehouseDirectoryString) { - this.warehouseDirectoryString = warehouseDirectoryString; - return this; - } - - public QFileBuilder setHiveRootDirectory(File hiveRootDirectory) { - this.hiveRootDirectory = hiveRootDirectory; - return this; - } - public QFile getQFile(String name) throws IOException { QFile result = new QFile(); result.name = name; @@ -282,15 +237,6 @@ public QFile getQFile(String name) throws IOException { result.logFile = new File(logDirectory, name + ".q.beeline"); result.beforeExecuteLogFile = new File(logDirectory, name + ".q.beforeExecute.log"); result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log"); - // These are the filters which are specific for the given QTest. - // Check staticFilterSet for common filters. 
- result.specificFilterSet = new RegexFilterSet() - .addFilter(scratchDirectoryString + "[\\w\\-/]+", "!!{hive.exec.scratchdir}!!") - .addFilter(warehouseDirectoryString, "!!{hive.metastore.warehouse.dir}!!") - .addFilter(resultsDirectory.getAbsolutePath(), "!!{expectedDirectory}!!") - .addFilter(logDirectory.getAbsolutePath(), "!!{outputDirectory}!!") - .addFilter(queryDirectory.getAbsolutePath(), "!!{qFileDirectory}!!") - .addFilter(hiveRootDirectory.getAbsolutePath(), "!!{hive.root}!!"); return result; } } diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java similarity index 90% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java index 760fde6..a7a3ec1 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java @@ -16,9 +16,7 @@ * limitations under the License. */ -package org.apache.hive.beeline.qfile; - -import org.apache.hive.beeline.BeeLine; +package org.apache.hive.beeline; import java.io.File; import java.io.IOException; @@ -54,7 +52,6 @@ protected QFileBeeLineClient(String jdbcUrl, String jdbcDriver, String username, public void execute(String[] commands, File resultFile) throws SQLException { beeLine.runCommands( new String[] { - "!set outputformat csv", "!record " + resultFile.getAbsolutePath() }); @@ -73,14 +70,24 @@ private void beforeExecute(QFile qFile) throws SQLException { "SHOW TABLES;", "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;", "CREATE DATABASE `" + qFile.getName() + "`;", - "USE `" + qFile.getName() + "`;" + "USE `" + qFile.getName() + "`;", + "!set outputformat tsv2" }, qFile.getBeforeExecuteLogFile()); + beeLine.getOpts().setSilent(true); + beeLine.getOpts().setVerbose(false); + beeLine.getOpts().setShowHeader(false); + beeLine.setIsTestMode(true); } private void afterExecute(QFile qFile) throws SQLException { + beeLine.getOpts().setSilent(false); + beeLine.getOpts().setVerbose(true); + beeLine.getOpts().setShowHeader(true); + beeLine.setIsTestMode(false); execute( new String[] { + "!set outputformat table", "USE default;", "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;", }, diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java itests/util/src/main/java/org/apache/hive/beeline/package-info.java similarity index 89% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java rename to itests/util/src/main/java/org/apache/hive/beeline/package-info.java index fcd50ec..e05ac0a 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java +++ itests/util/src/main/java/org/apache/hive/beeline/package-info.java @@ -17,6 +17,6 @@ */ /** - * Package for the BeeLine specific QTest file classes. + * Package for the BeeLine-specific QTest classes. 
*/ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index d981119..6f2a58c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -1375,7 +1375,7 @@ private ReentrantLock tryAcquireCompileLock(boolean isParallelEnabled, OperationLog ol = OperationLog.getCurrentOperationLog(); if (ol != null) { - ol.writeOperationLog(LoggingLevel.EXECUTION, "Waiting to acquire compile lock.\n"); + ol.writeOperationLog(LoggingLevel.EXECUTION, "Waiting to acquire compile lock.\n", false); } if (maxCompileLockWaitTime > 0) { @@ -1398,7 +1398,7 @@ private ReentrantLock tryAcquireCompileLock(boolean isParallelEnabled, LOG.debug(lockAcquiredMsg); if (ol != null) { - ol.writeOperationLog(LoggingLevel.EXECUTION, lockAcquiredMsg + "\n"); + ol.writeOperationLog(LoggingLevel.EXECUTION, lockAcquiredMsg + "\n", false); } return compileLock; } @@ -2097,7 +2097,7 @@ private void logMrWarning(int mrJobs) { // Propagate warning to beeline via operation log. OperationLog ol = OperationLog.getCurrentOperationLog(); if (ol != null) { - ol.writeOperationLog(LoggingLevel.EXECUTION, warning + "\n"); + ol.writeOperationLog(LoggingLevel.EXECUTION, warning + "\n", false); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index 08d0544..ee1c94c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -340,7 +340,7 @@ private void logTypeWarning(String colName, String colType) { // Propagate warning to beeline via operation log. 
OperationLog ol = OperationLog.getCurrentOperationLog(); if (ol != null) { - ol.writeOperationLog(LoggingLevel.EXECUTION, warning + "\n"); + ol.writeOperationLog(LoggingLevel.EXECUTION, warning + "\n", true); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java index 18216f2..04c9f45 100644 --- ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java +++ ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java @@ -20,6 +20,8 @@ import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.io.IOUtils; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,6 +39,8 @@ private final String operationName; private final LogFile logFile; + private final LogFile testLogFile; + private final boolean isInTest; private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN; public PrintStream getPrintStream() { @@ -47,14 +51,57 @@ public PrintStream getPrintStream() { NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN } - public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFoundException { + private File createLogFile(File parentDir, String fileName) throws IOException { + File operationLogFile = new File(parentDir, fileName); + + // create log file + try { + if (operationLogFile.exists()) { + LOG.warn("The operation log file should not exist, but it is already there: " + + operationLogFile.getAbsolutePath()); + operationLogFile.delete(); + } + if (!operationLogFile.getParentFile().exists()) { + LOG.warn("Operations log directory for this session does not exist; it could have been deleted " + + "externally. Recreating the directory for future queries in this session, but the older operation " + + "logs for this session are no longer available"); + if (!operationLogFile.getParentFile().mkdir()) { + throw new IOException("Log directory for this session could not be created"); + } + } + if (!operationLogFile.createNewFile()) { + // the log file already exists and cannot be deleted. + // If it can be read/written, keep its contents and use it. 
+ if (!operationLogFile.canRead() || !operationLogFile.canWrite()) { + throw new IOException("The existing operation log file cannot be recreated, " + + "and it cannot be read or written: " + operationLogFile.getAbsolutePath()); + } + } + } catch (Exception e) { + throw new IOException("Unable to create operation log file: " + operationLogFile.getAbsolutePath(), e); + } + return operationLogFile; + } + + public OperationLog(String name, File dir, String key, HiveConf hiveConf) + throws IOException { operationName = name; - logFile = new LogFile(file); + logFile = new LogFile(createLogFile(dir, key)); if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) { String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL); opLoggingLevel = getLoggingLevel(logLevel); } + + // If in test mode, create a test log file which will contain only the logs that are supposed + // to be written to the qtest output + if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) { + testLogFile = new LogFile(createLogFile(dir, key + ".test")); + isInTest = true; + } else { + testLogFile = null; + isInTest = false; + } } public static LoggingLevel getLoggingLevel (String mode) { @@ -102,17 +149,25 @@ public static void removeCurrentOperationLog() { * Write operation execution logs into log file * @param operationLogMessage one line of log emitted from log4j */ - public void writeOperationLog(String operationLogMessage) { + public void writeOperationLog(LogEvent logEvent, String operationLogMessage) { logFile.write(operationLogMessage); + if (isInTest && logEvent.getLevel().isMoreSpecificThan(Level.ERROR)) { + testLogFile.write(operationLogMessage); + } } /** * Write operation execution logs into log file + * @param level The log level of the message * @param operationLogMessage one line of log emitted from log4j + * @param addToTest Add the message to the test logs as well */ - public void writeOperationLog(LoggingLevel level, String operationLogMessage) { + public void writeOperationLog(LoggingLevel level, String operationLogMessage, boolean addToTest) { if (opLoggingLevel.compareTo(level) < 0) return; logFile.write(operationLogMessage); + if (addToTest && isInTest) { + testLogFile.write(operationLogMessage); + } } @@ -124,15 +179,24 @@ public void writeOperationLog(LoggingLevel level, String operationLogMessage) { * @throws java.sql.SQLException */ public List<String> readOperationLog(boolean isFetchFirst, long maxRows) - throws SQLException{ - return logFile.read(isFetchFirst, maxRows); + throws SQLException { + if (isInTest) { + return testLogFile.read(isFetchFirst, maxRows); + } else { + return logFile.read(isFetchFirst, maxRows); + } } /** * Close this OperationLog when operation is closed. The log file will be removed. 
*/ public void close() { - logFile.remove(); + if (isInTest) { + logFile.remove(false); + testLogFile.remove(false); + } else { + logFile.remove(true); + } } /** @@ -166,7 +230,7 @@ synchronized void write(String msg) { return readResults(maxRows); } - synchronized void remove() { + synchronized void remove(boolean removeLog) { try { if (in != null) { in.close(); @@ -174,7 +238,7 @@ synchronized void remove() { if (out != null) { out.close(); } - if (!isRemoved) { + if (!isRemoved && removeLog) { FileUtils.forceDelete(file); isRemoved = true; } diff --git ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out index d22c9ec..1342674 100644 --- ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out +++ ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out @@ -1,67 +1,20 @@ ->>> !run !!{qFileDirectory}!!/drop_with_concurrency.q ->>> set hive.lock.numretries=1; -No rows affected ->>> set hive.lock.sleep.between.retries=1; -No rows affected ->>> set hive.support.concurrency=true; -No rows affected ->>> set hive.lock.manager=org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager; -No rows affected ->>> ->>> drop table if exists drop_with_concurrency_1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 PREHOOK: query: drop table if exists drop_with_concurrency_1 PREHOOK: type: DROPTABLE -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop table if exists drop_with_concurrency_1 POSTHOOK: type: DROPTABLE -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query drop table if exists drop_with_concurrency_1 -No rows affected ->>> create table drop_with_concurrency_1 (c1 int); -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) PREHOOK: query: create table drop_with_concurrency_1 (c1 int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:drop_with_concurrency PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create table drop_with_concurrency_1 (c1 int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:drop_with_concurrency POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create table drop_with_concurrency_1 (c1 int) -No rows affected ->>> drop table drop_with_concurrency_1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 PREHOOK: query: drop table drop_with_concurrency_1 PREHOOK: type: DROPTABLE PREHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop table drop_with_concurrency_1 POSTHOOK: type: DROPTABLE POSTHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query drop table drop_with_concurrency_1 -No rows affected ->>> !record diff --git ql/src/test/results/clientpositive/beeline/escape_comments.q.out ql/src/test/results/clientpositive/beeline/escape_comments.q.out index 5f9df93..1ab6fe4 100644 --- ql/src/test/results/clientpositive/beeline/escape_comments.q.out +++ ql/src/test/results/clientpositive/beeline/escape_comments.q.out @@ -1,90 +1,34 @@ ->>> !run !!{qFileDirectory}!!/escape_comments.q ->>> create database escape_comments_db comment 'a\nb'; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' PREHOOK: query: create database escape_comments_db comment 'a\nb' PREHOOK: type: CREATEDATABASE PREHOOK: Output: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create database escape_comments_db comment 'a\nb' POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create database escape_comments_db comment 'a\nb' -No rows affected ->>> use escape_comments_db; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): use escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): use escape_comments_db PREHOOK: query: use escape_comments_db PREHOOK: type: SWITCHDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: use escape_comments_db POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query use escape_comments_db -No rows affected ->>> create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb'); -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -PREHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +PREHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') PREHOOK: type: CREATETABLE PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode -POSTHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +POSTHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -No rows affected ->>> create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col1, type:string, comment:null)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 PREHOOK: type: CREATEVIEW PREHOOK: Input: escape_comments_db@escape_comments_tbl1 PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_view1 -Starting task [Stage-1:DDL] in serial mode -POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 POSTHOOK: type: CREATEVIEW POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 @@ -92,313 +36,170 @@ POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_view1 POSTHOOK: Lineage: escape_comments_view1.col1 SIMPLE [(escape_comments_tbl1)escape_comments_tbl1.FieldSchema(name:col1, type:string, comment:a b';), ] -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -No rows affected ->>> create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'; -Acquired the compile lock. 
-Compiling commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' PREHOOK: type: CREATEINDEX PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' POSTHOOK: type: CREATEINDEX POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -No rows affected ->>> ->>> describe database extended escape_comments_db; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db PREHOOK: query: describe database extended escape_comments_db PREHOOK: type: DESCDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe database extended escape_comments_db POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe database extended escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> describe database escape_comments_db; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe database escape_comments_db -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): describe database escape_comments_db +escape_comments_db a\nb location/in/test user USER PREHOOK: query: describe database escape_comments_db PREHOOK: type: DESCDATABASE PREHOOK: Input: database:escape_comments_db -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe database escape_comments_db POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:escape_comments_db -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe database escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> show create table escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 +escape_comments_db a\nb location/in/test user USER PREHOOK: query: show create table escape_comments_tbl1 PREHOOK: type: SHOW_CREATETABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show create table escape_comments_tbl1 POSTHOOK: type: SHOW_CREATETABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show create table escape_comments_tbl1 -'createtab_stmt' -'CREATE TABLE `escape_comments_tbl1`(' -' `col1` string COMMENT 'a\nb\'\;')' -'COMMENT 'a\nb'' -'PARTITIONED BY ( ' -' `p1` string COMMENT 'a\nb')' -'ROW FORMAT SERDE ' -' 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' ' -'STORED AS INPUTFORMAT ' -' 'org.apache.hadoop.mapred.TextInputFormat' ' -'OUTPUTFORMAT ' -' 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'' -'LOCATION' -' '!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1'' -'TBLPROPERTIES (' -' 'transient_lastDdlTime'='!!UNIXTIME!!')' -15 rows selected ->>> describe formatted escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 +CREATE TABLE `escape_comments_tbl1`( + `col1` string COMMENT 'a\nb\'\;') +COMMENT 'a\nb' +PARTITIONED BY ( + `p1` string COMMENT 'a\nb') +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( +#### A masked pattern was here #### PREHOOK: query: describe formatted escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe formatted escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe formatted escape_comments_tbl1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb';' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Location: ','!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1','NULL' -'Table Type: ','MANAGED_TABLE ','NULL' -'Table Parameters:','NULL','NULL' -'','COLUMN_STATS_ACCURATE','{\"BASIC_STATS\":\"true\"}' -'','comment ','a\nb ' -'','numFiles ','0 ' -'','numRows ','0 ' -'','rawDataSize ','0 ' -'','totalSize ','0 ' -'','transient_lastDdlTime','!!UNIXTIME!! ' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'Storage Desc Params:','NULL','NULL' -'','serialization.format','1 ' -36 rows selected ->>> describe pretty escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -Executing commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 +# col_name data_type comment + NULL NULL +col1 string a\nb'; + NULL NULL +# Partition Information NULL NULL +# col_name data_type comment + NULL NULL +p1 string a\nb + NULL NULL +# Detailed Table Information NULL NULL +Database: escape_comments_db NULL +Owner: user NULL +#### A masked pattern was here #### +LastAccessTime: UNKNOWN NULL +Retention: 0 NULL +#### A masked pattern was here #### +Table Type: MANAGED_TABLE NULL +Table Parameters: NULL NULL + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + comment a\nb + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 +#### A masked pattern was here #### + NULL NULL +# Storage Information NULL NULL +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe NULL +InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL +Compressed: No NULL +Num Buckets: -1 NULL +Bucket Columns: [] NULL +Sort Columns: [] NULL +Storage Desc Params: NULL NULL + serialization.format 1 PREHOOK: query: describe pretty escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe pretty escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe pretty escape_comments_tbl1 -'col_name','data_type','comment' -'col_name ','data_type ','comment' -'','','' -'col1 ','string ','a' -' ',' ','b';' -'p1 ','string ','a' -' ',' ','b' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'col_name ','data_type ','comment' -'','','' -'p1 ','string ','a' -' ',' ','b' -12 rows selected ->>> describe escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 +col_name data_type comment + +col1 string a + b'; +p1 string a + b + NULL NULL +# Partition Information NULL NULL +col_name data_type comment + +p1 string a + b PREHOOK: query: describe escape_comments_tbl1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe escape_comments_tbl1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe escape_comments_tbl1 -'col_name','data_type','comment' -'col1','string','a\nb';' -'p1','string','a\nb' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -7 rows selected ->>> show create table escape_comments_view1; -Acquired the compile lock. 
-Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 +col1 string a\nb'; +p1 string a\nb + NULL NULL +# Partition Information NULL NULL +# col_name data_type comment + NULL NULL +p1 string a\nb PREHOOK: query: show create table escape_comments_view1 PREHOOK: type: SHOW_CREATETABLE PREHOOK: Input: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show create table escape_comments_view1 POSTHOOK: type: SHOW_CREATETABLE POSTHOOK: Input: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show create table escape_comments_view1 -'createtab_stmt' -'CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`' -1 row selected ->>> describe formatted escape_comments_view1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 +CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` PREHOOK: query: describe formatted escape_comments_view1 PREHOOK: type: DESCTABLE PREHOOK: Input: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: describe formatted escape_comments_view1 POSTHOOK: type: DESCTABLE POSTHOOK: Input: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query describe formatted escape_comments_view1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Table Type: ','VIRTUAL_VIEW ','NULL' -'Table Parameters:','NULL','NULL' -'','comment ','a\nb ' -'','transient_lastDdlTime','!!UNIXTIME!! 
' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','null ','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'','NULL','NULL' -'# View Information','NULL','NULL' -'View Original Text: ','select col1 from escape_comments_tbl1','NULL' -'View Expanded Text: ','SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`','NULL' -'View Rewrite Enabled:','No ','NULL' -28 rows selected ->>> show formatted index on escape_comments_tbl1; -Acquired the compile lock. -Compiling commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:idx_name, type:string, comment:from deserializer), FieldSchema(name:tab_name, type:string, comment:from deserializer), FieldSchema(name:col_names, type:string, comment:from deserializer), FieldSchema(name:idx_tab_name, type:string, comment:from deserializer), FieldSchema(name:idx_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 +# col_name data_type comment + NULL NULL +col1 string a\nb + NULL NULL +# Detailed Table Information NULL NULL +Database: escape_comments_db NULL +Owner: user NULL +#### A masked pattern was here #### +LastAccessTime: UNKNOWN NULL +Retention: 0 NULL +Table Type: VIRTUAL_VIEW NULL +Table Parameters: NULL NULL + comment a\nb +#### A masked pattern was here #### + NULL NULL +# Storage Information NULL NULL +SerDe Library: null NULL +InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL +Compressed: No NULL +Num Buckets: -1 NULL +Bucket Columns: [] NULL +Sort Columns: [] NULL + NULL NULL +# View Information NULL NULL +View Original Text: select col1 from escape_comments_tbl1 NULL +View Expanded Text: SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` NULL +View Rewrite Enabled: No NULL PREHOOK: query: show formatted index on escape_comments_tbl1 PREHOOK: type: SHOWINDEXES -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: show formatted index on escape_comments_tbl1 POSTHOOK: type: SHOWINDEXES -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query show formatted index on escape_comments_tbl1 -'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment' -'idx_name ','tab_name ','col_names ','idx_tab_name ','idx_type ','comment ' -'','NULL','NULL','NULL','NULL','NULL' -'','NULL','NULL','NULL','NULL','NULL' -'index2 ','escape_comments_tbl1','col1 ','escape_comments_db__escape_comments_tbl1_index2__','compact ','a\nb ' -4 rows selected ->>> ->>> drop database escape_comments_db cascade; -Acquired the compile lock. 
-Compiling commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade -Semantic Analysis Completed -Returning Hive schema: Schema(fieldSchemas:null, properties:null) -Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -Executing commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade +idx_name tab_name col_names idx_tab_name idx_type comment + NULL NULL NULL NULL NULL + NULL NULL NULL NULL NULL +index2 escape_comments_tbl1 col1 escape_comments_db__escape_comments_tbl1_index2__ compact a\nb PREHOOK: query: drop database escape_comments_db cascade PREHOOK: type: DROPDATABASE PREHOOK: Input: database:escape_comments_db @@ -406,7 +207,6 @@ PREHOOK: Output: database:escape_comments_db PREHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ PREHOOK: Output: escape_comments_db@escape_comments_tbl1 PREHOOK: Output: escape_comments_db@escape_comments_view1 -Starting task [Stage-0:DDL] in serial mode POSTHOOK: query: drop database escape_comments_db cascade POSTHOOK: type: DROPDATABASE POSTHOOK: Input: database:escape_comments_db @@ -414,8 +214,3 @@ POSTHOOK: Output: database:escape_comments_db POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 POSTHOOK: Output: escape_comments_db@escape_comments_view1 -Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -OK -Shutting down query drop database escape_comments_db cascade -No rows affected ->>> !record diff --git ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out new file mode 100644 index 0000000..b6fa03f --- /dev/null +++ ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out @@ -0,0 +1,251 @@ +PREHOOK: query: explain +select 'a', 100 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 'a', 100 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 'a' (type: string), 100 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 'a', 100 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 'a', 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a 100 +PREHOOK: query: explain +select 1 + 1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 1 + 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 2 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 1 + 1 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 1 + 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: 
_dummy_database@_dummy_table +#### A masked pattern was here #### +2 +PREHOOK: query: explain +select explode(array('a', 'b')) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select explode(array('a', 'b')) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: array('a','b') (type: array) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + UDTF Operator + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + function name: explode + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select explode(array('a', 'b')) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select explode(array('a', 'b')) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a +b +PREHOOK: query: explain +select 'a', 100 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 'a', 100 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 'a' (type: string), 100 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 'a', 100 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 'a', 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a 100 +PREHOOK: query: explain +select 1 + 1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 1 + 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 2 (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 1 + 1 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 1 + 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2 +PREHOOK: query: explain +select explode(array('a', 'b')) +PREHOOK: type: QUERY +POSTHOOK: query: explain +select explode(array('a', 'b')) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + 
Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: array('a','b') (type: array) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + UDTF Operator + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + function name: explode + ListSink + +PREHOOK: query: select explode(array('a', 'b')) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select explode(array('a', 'b')) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +a +b +PREHOOK: query: explain +select 2 + 3,x from (select 1 + 2 x) X +PREHOOK: type: QUERY +POSTHOOK: query: explain +select 2 + 3,x from (select 1 + 2 x) X +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 5 (type: int), 3 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + ListSink + +PREHOOK: query: select 2 + 3,x from (select 1 + 2 x) X +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select 2 + 3,x from (select 1 + 2 x) X +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +5 3 diff --git service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index 8f08c2e..311adc8 100644 --- service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -130,7 +130,7 @@ public void runInternal() throws HiveSQLException { OperationLog ol = OperationLog.getCurrentOperationLog(); if (ol != null) { for (String consoleMsg : response.getConsoleMessages()) { - ol.writeOperationLog(LoggingLevel.EXECUTION, consoleMsg + "\n"); + ol.writeOperationLog(LoggingLevel.EXECUTION, consoleMsg + "\n", true); } } } diff --git service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java index eaf1acb..b338f8e 100644 --- service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java +++ service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java @@ -226,7 +226,7 @@ public void append(LogEvent event) { LOG.debug(" ---+++=== Dropped log event from thread " + event.getThreadName()); return; } - log.writeOperationLog(logOutput); + log.writeOperationLog(event, logOutput); } protected static class StringOutputStreamManager extends OutputStreamManager { diff --git service/src/java/org/apache/hive/service/cli/operation/Operation.java service/src/java/org/apache/hive/service/cli/operation/Operation.java index 11a820f..9330b5b 100644 --- service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -19,6 +19,7 @@ import java.io.File; import java.io.FileNotFoundException; 
+import java.io.IOException; import java.util.EnumSet; import java.util.HashMap; import java.util.List; @@ -212,50 +213,15 @@ public boolean isDone() { protected void createOperationLog() { if (parentSession.isOperationLogEnabled()) { - File operationLogFile = new File(parentSession.getOperationLogSessionDir(), - opHandle.getHandleIdentifier().toString()); - isOperationLogEnabled = true; - - // create log file - try { - if (operationLogFile.exists()) { - LOG.warn("The operation log file should not exist, but it is already there: " + - operationLogFile.getAbsolutePath()); - operationLogFile.delete(); - } - if (!operationLogFile.getParentFile().exists()) { - LOG.warn("Operations log directory for this session does not exist, it could have been deleted " + - "externally. Recreating the directory for future queries in this session but the older operation " + - "logs for this session are no longer available"); - if (!operationLogFile.getParentFile().mkdir()) { - LOG.warn("Log directory for this session could not be created, disabling " + - "operation logs: " + operationLogFile.getParentFile().getAbsolutePath()); - isOperationLogEnabled = false; - return; - } - } - if (!operationLogFile.createNewFile()) { - // the log file already exists and cannot be deleted. - // If it can be read/written, keep its contents and use it. - if (!operationLogFile.canRead() || !operationLogFile.canWrite()) { - LOG.warn("The already existed operation log file cannot be recreated, " + - "and it cannot be read or written: " + operationLogFile.getAbsolutePath()); - isOperationLogEnabled = false; - return; - } - } - } catch (Exception e) { - LOG.warn("Unable to create operation log file: " + operationLogFile.getAbsolutePath(), e); - isOperationLogEnabled = false; - return; - } - // create OperationLog object with above log file try { - operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf()); - } catch (FileNotFoundException e) { - LOG.warn("Unable to instantiate OperationLog object for operation: " + - opHandle, e); + operationLog = new OperationLog(opHandle.toString(), + parentSession.getOperationLogSessionDir(), opHandle.getHandleIdentifier().toString(), + parentSession.getHiveConf()); + isOperationLogEnabled = true; + } catch (IOException e) { + LOG.warn("Unable to instantiate OperationLog object for operation: " + opHandle + + ", disabling operation logs", e); isOperationLogEnabled = false; return; } diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index fd74d55..5e46c87 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -756,7 +756,7 @@ public void close() throws HiveSQLException { } private void cleanupSessionLogDir() { - if (isOperationLogEnabled) { + if (isOperationLogEnabled && !sessionConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) { try { FileUtils.forceDelete(sessionLogDir); LOG.info("Operation log session directory is deleted: "