diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index 11526a7..f73fef4 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -148,6 +148,10 @@ // Indicates if this instance of beeline is running in compatibility mode, or beeline mode private boolean isBeeLine = true; + // Indicates that we are in test mode. + // Print only the errors, the operation log and the query results. + private boolean isTestMode = false; + private static final Options options = new Options(); public static final String BEELINE_DEFAULT_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver"; @@ -2386,4 +2390,19 @@ public String getCurrentDatabase() { public void setCurrentDatabase(String currentDatabase) { this.currentDatabase = currentDatabase; } + + /** + * Sets the BeeLine test mode. + * In test mode only the errors, the operation log and the query results are printed. + * Should be used only by tests. + * + * @param isTestMode true to enable test mode + */ + void setIsTestMode(boolean isTestMode) { + this.isTestMode = isTestMode; + } + + boolean isTestMode() { + return isTestMode; + } } diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index 2578728..aac6994 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -978,7 +978,8 @@ private boolean executeInternal(String sql, boolean call) { hasResults = ((CallableStatement) stmnt).execute(); } else { stmnt = beeLine.createStatement(); - if (beeLine.getOpts().isSilent()) { + // In test mode we want the operation logs regardless of the settings + if (!beeLine.isTestMode() && beeLine.getOpts().isSilent()) { hasResults = stmnt.execute(sql); } else { InPlaceUpdateStream.EventNotifier eventNotifier = @@ -1342,7 +1343,12 @@ private void updateQueryLog() { try { List queryLogs = hiveStatement.getQueryLog(); for (String log : queryLogs) { - commands.beeLine.info(log); + if (!commands.beeLine.isTestMode()) { + commands.beeLine.info(log); + } else { + // In test mode print the logs to the output + commands.beeLine.output(log); + } } if (!queryLogs.isEmpty()) { notifier.operationLogShowedToUser(); @@ -1386,7 +1392,12 @@ private void showRemainingLogsIfAny(Statement statement) { return; } for (String log : logs) { - beeLine.info(log); + if (!beeLine.isTestMode()) { + beeLine.info(log); + } else { + // In test mode print the logs to the output + beeLine.output(log); + } } } while (logs.size() > 0); } else { @@ -1810,7 +1821,7 @@ public boolean run(String line) { if (cmd != null) { // we're continuing an existing command - cmd.append(" \n"); + cmd.append("\n"); cmd.append(scriptLine); if (trimmedLine.endsWith(";")) { // this command has terminated diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 7a70c9c..2b70fbe 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -745,7 +745,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\ encryption_auto_purge_tables.q beeline.positive.include=drop_with_concurrency.q,\ - escape_comments.q + escape_comments.q,\ + select_dummy_source.q minimr.query.negative.files=cluster_tasklog_retrieval.q,\ file_with_header_footer_negative.q,\ diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index 0d63f5d..5718a49 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -23,10 +23,10 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter; -import org.apache.hive.beeline.qfile.QFile; -import org.apache.hive.beeline.qfile.QFile.QFileBuilder; -import org.apache.hive.beeline.qfile.QFileBeeLineClient; -import org.apache.hive.beeline.qfile.QFileBeeLineClient.QFileClientBuilder; +import org.apache.hive.beeline.QFile; +import org.apache.hive.beeline.QFile.QFileBuilder; +import org.apache.hive.beeline.QFileBeeLineClient; +import org.apache.hive.beeline.QFileBeeLineClient.QFileClientBuilder; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -94,12 +94,9 @@ public void beforeClass() throws Exception { .setPassword("password"); fileBuilder = new QFileBuilder() - .setHiveRootDirectory(hiveRootDirectory) .setLogDirectory(logDirectory) .setQueryDirectory(queryDirectory) - .setResultsDirectory(resultsDirectory) - .setScratchDirectoryString(hiveConf.getVar(HiveConf.ConfVars.SCRATCHDIR)) - .setWarehouseDirectoryString(hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE)); + .setResultsDirectory(resultsDirectory); runInfraScript(initScript, new File(logDirectory, "init.beeline"), new File(logDirectory, "init.raw")); diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java similarity index 68% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFile.java index ae5a349..8c00ea5 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -16,7 +16,7 @@ * limitations under the License. 
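For reference, the trimmed-down builder wiring from the CoreBeeLineDriver hunk above reduces to the following minimal sketch. It uses only the setters and getQFile(String) visible in this diff; the directory paths are placeholders, not values from the patch:

```java
import java.io.File;
import java.io.IOException;

import org.apache.hive.beeline.QFile;
import org.apache.hive.beeline.QFile.QFileBuilder;

public class QFileBuilderSketch {
  public static void main(String[] args) throws IOException {
    // Placeholder directories -- a real run gets these from the test harness.
    File queryDirectory = new File("ql/src/test/queries/clientpositive");
    File logDirectory = new File("target/qfile-results/beeline/logs");
    File resultsDirectory = new File("ql/src/test/results/clientpositive/beeline");

    // After this patch the builder needs only three directories; the scratch,
    // warehouse and hive-root directories are gone together with the
    // path-masking filters that used them.
    QFile qFile = new QFileBuilder()
        .setLogDirectory(logDirectory)
        .setQueryDirectory(queryDirectory)
        .setResultsDirectory(resultsDirectory)
        .getQFile("select_dummy_source");
    System.out.println("Prepared QFile: " + qFile.getName());
  }
}
```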
*/ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.ql.QTestProcessExecResult; @@ -29,9 +29,7 @@ import java.io.PrintStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import java.util.regex.Pattern; /** @@ -40,15 +38,17 @@ */ public final class QFile { private static final String DEBUG_HINT = - "The following files can help you identifying the problem:\n" - + " - Query file: %1\n" - + " - Raw output file: %2\n" - + " - Filtered output file: %3\n" - + " - Expected output file: %4\n" - + " - Client log file: %5\n" - + " - Client log files before the test: %6\n" - + " - Client log files after the test: %7\n" - + " - Hiveserver2 log file: %8\n"; + "The following files can help you identify the problem:%n" + + " - Query file: %1s%n" + + " - Raw output file: %2s%n" + + " - Filtered output file: %3s%n" + + " - Expected output file: %4s%n" + + " - Client log file: %5s%n" + + " - Client log files before the test: %6s%n" + + " - Client log files after the test: %7s%n" + + " - HiveServer2 log file: %8s%n"; + private static final String MASK_PATTERN = "\n#### A masked pattern was here ####\n"; + private String name; private File inputFile; @@ -199,43 +199,13 @@ public String filter(String input) { // These are the filters which are common for every QTest. // Check specificFilterSet for QTest specific ones. private static RegexFilterSet getStaticFilterSet() { - // Extract the leading four digits from the unix time value. - // Use this as a prefix in order to increase the selectivity - // of the unix time stamp replacement regex. - String currentTimePrefix = Long.toString(System.currentTimeMillis()).substring(0, 4); - - String userName = System.getProperty("user.name"); - - String timePattern = "(Mon|Tue|Wed|Thu|Fri|Sat|Sun) " - + "(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " - + "\\d{2} \\d{2}:\\d{2}:\\d{2} \\w+ 20\\d{2}"; // Pattern to remove the timestamp and other infrastructural info from the out file - String logPattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d*\\s+\\S+\\s+\\[" + ".*\\]\\s+\\S+:\\s+"; - String operatorPattern = "\"(CONDITION|COPY|DEPENDENCY_COLLECTION|DDL" + "|EXPLAIN|FETCH|FIL|FS|FUNCTION|GBY|HASHTABLEDUMMY|HASTTABLESINK|JOIN" + "|LATERALVIEWFORWARD|LIM|LVJ|MAP|MAPJOIN|MAPRED|MAPREDLOCAL|MOVE|OP|RS" + "|SCR|SEL|STATS|TS|UDTF|UNION)_\\d+\""; - return new RegexFilterSet() - .addFilter(logPattern, "") - .addFilter("(?s)\n[^\n]*Waiting to acquire compile lock.*?Acquired the compile lock.\n", "\n") - .addFilter("Acquired the compile lock.\n","") - .addFilter("Getting log thread is interrupted, since query is done!\n", "") - .addFilter("going to print operations logs\n", "") - .addFilter("printed operations logs\n", "") - .addFilter("\\(queryId=[^\\)]*\\)", "queryId=(!!{queryId}!!)") - .addFilter("file:/\\w\\S+", "file:/!!ELIDED!!") - .addFilter("pfile:/\\w\\S+", "pfile:/!!ELIDED!!") - .addFilter("hdfs:/\\w\\S+", "hdfs:/!!ELIDED!!") - .addFilter("last_modified_by=\\w+", "last_modified_by=!!ELIDED!!") - .addFilter(timePattern, "!!TIMESTAMP!!") - .addFilter("(\\D)" + currentTimePrefix + "\\d{6}(\\D)", "$1!!UNIXTIME!!$2") - .addFilter("(\\D)" + currentTimePrefix + "\\d{9}(\\D)", "$1!!UNIXTIMEMILLIS!!$2") - .addFilter(userName, "!!{user.name}!!") - .addFilter(operatorPattern, "\"$1_!!ELIDED!!\"") - .addFilter("Time taken: [0-9\\.]* seconds", "Time taken: !!ELIDED!!
seconds"); + .addFilter("Reading log file: .*\n", "") + .addFilter("INFO : ", "") + .addFilter("\n.*/tmp/.*\n", MASK_PATTERN) + .addFilter("\n.*CreateTime.*\n", MASK_PATTERN) + .addFilter("\n.*transient_lastDdlTime.*\n", MASK_PATTERN); } /** @@ -246,9 +216,6 @@ private static RegexFilterSet getStaticFilterSet() { private File queryDirectory; private File logDirectory; private File resultsDirectory; - private String scratchDirectoryString; - private String warehouseDirectoryString; - private File hiveRootDirectory; public QFileBuilder() { } @@ -268,21 +235,6 @@ public QFileBuilder setResultsDirectory(File resultsDirectory) { return this; } - public QFileBuilder setScratchDirectoryString(String scratchDirectoryString) { - this.scratchDirectoryString = scratchDirectoryString; - return this; - } - - public QFileBuilder setWarehouseDirectoryString(String warehouseDirectoryString) { - this.warehouseDirectoryString = warehouseDirectoryString; - return this; - } - - public QFileBuilder setHiveRootDirectory(File hiveRootDirectory) { - this.hiveRootDirectory = hiveRootDirectory; - return this; - } - public QFile getQFile(String name) throws IOException { QFile result = new QFile(); result.name = name; @@ -293,15 +245,10 @@ public QFile getQFile(String name) throws IOException { result.logFile = new File(logDirectory, name + ".q.beeline"); result.beforeExecuteLogFile = new File(logDirectory, name + ".q.beforeExecute.log"); result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log"); - // These are the filters which are specific for the given QTest. - // Check staticFilterSet for common filters. result.specificFilterSet = new RegexFilterSet() - .addFilter(scratchDirectoryString + "[\\w\\-/]+", "!!{hive.exec.scratchdir}!!") - .addFilter(warehouseDirectoryString, "!!{hive.metastore.warehouse.dir}!!") - .addFilter(resultsDirectory.getAbsolutePath(), "!!{expectedDirectory}!!") - .addFilter(logDirectory.getAbsolutePath(), "!!{outputDirectory}!!") - .addFilter(queryDirectory.getAbsolutePath(), "!!{qFileDirectory}!!") - .addFilter(hiveRootDirectory.getAbsolutePath(), "!!{hive.root}!!"); + .addFilter("(PREHOOK|POSTHOOK): (Output|Input): database:" + name + "\n", + "$1: $2: database:default\n") + .addFilter("(PREHOOK|POSTHOOK): (Output|Input): " + name + "@", "$1: $2: default@"); return result; } } diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java similarity index 91% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java index 760fde6..873bfc6 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java @@ -16,9 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hive.beeline.qfile; - -import org.apache.hive.beeline.BeeLine; +package org.apache.hive.beeline; import java.io.File; import java.io.IOException; @@ -54,7 +52,6 @@ protected QFileBeeLineClient(String jdbcUrl, String jdbcDriver, String username, public void execute(String[] commands, File resultFile) throws SQLException { beeLine.runCommands( new String[] { - "!set outputformat csv", "!record " + resultFile.getAbsolutePath() }); @@ -69,18 +66,28 @@ public void execute(String[] commands, File resultFile) throws SQLException { private void beforeExecute(QFile qFile) throws SQLException { execute( new String[] { + "!set outputformat tsv2", + "!set verbose false", + "!set silent true", + "!set showheader false", "USE default;", "SHOW TABLES;", "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;", "CREATE DATABASE `" + qFile.getName() + "`;", - "USE `" + qFile.getName() + "`;" + "USE `" + qFile.getName() + "`;", }, qFile.getBeforeExecuteLogFile()); + beeLine.setIsTestMode(true); } private void afterExecute(QFile qFile) throws SQLException { + beeLine.setIsTestMode(false); execute( new String[] { + "!set verbose true", + "!set silent false", + "!set showheader true", + "!set outputformat table", "USE default;", "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;", }, diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java itests/util/src/main/java/org/apache/hive/beeline/package-info.java similarity index 89% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java rename to itests/util/src/main/java/org/apache/hive/beeline/package-info.java index fcd50ec..e05ac0a 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java +++ itests/util/src/main/java/org/apache/hive/beeline/package-info.java @@ -17,6 +17,6 @@ */ /** - * Package for the BeeLine specific QTest file classes. + * Package for the BeeLine specific QTest classes. */ -package org.apache.hive.beeline.qfile; +package org.apache.hive.beeline; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index a5c0fcd..37fffe9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -32,6 +32,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; +import org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest; import org.apache.hadoop.mapreduce.MRJobConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -634,6 +635,7 @@ private static void setupChildLog4j(Configuration conf) { try { LogUtils.initHiveExecLog4j(); LogDivertAppender.registerRoutingAppender(conf); + LogDivertAppenderForTest.registerRoutingAppenderIfInTest(conf); } catch (LogInitializationException e) { System.err.println(e.getMessage()); } diff --git ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java new file mode 100644 index 0000000..966c264 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java @@ -0,0 +1,182 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.RandomAccessFileAppender; +import org.apache.logging.log4j.core.appender.routing.Route; +import org.apache.logging.log4j.core.appender.routing.Routes; +import org.apache.logging.log4j.core.appender.routing.RoutingAppender; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.config.Node; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.config.plugins.processor.PluginEntry; +import org.apache.logging.log4j.core.config.plugins.util.PluginType; +import org.apache.logging.log4j.core.filter.AbstractFilter; +import org.apache.logging.log4j.core.layout.PatternLayout; + +/** + * Divert appender to redirect and filter test operation logs to match the output of the original + * CLI qtest results. + */ +public final class LogDivertAppenderForTest { + private LogDivertAppenderForTest() { + // Prevent instantiation + } + + /** + * A log filter that accepts only the test messages coming from the SessionState logger. + */ + @Plugin(name = "TestFilter", category = "Core", elementType="filter", printObject = true) + private static class TestFilter extends AbstractFilter { + @Override + public Result filter(LogEvent event) { + if (event.getLevel().equals(Level.INFO) && "SessionState".equals(event.getLoggerName())) { + if (event.getMessage().getFormattedMessage().startsWith("PREHOOK:") + || event.getMessage().getFormattedMessage().startsWith("POSTHOOK:")) { + return Result.ACCEPT; + } + } + return Result.DENY; + } + + @PluginFactory + public static TestFilter createFilter() { + return new TestFilter(); + } + } + + /** + * If HIVE_IN_TEST is set, then programmatically register a routing appender with the Log4j + * configuration, which automatically writes the test log of each query to an individual file.
+ * The equivalent property configuration is as follows: * # queryId based routing file appender appender.test-query-routing.type = Routing appender.test-query-routing.name = test-query-routing appender.test-query-routing.routes.type = Routes appender.test-query-routing.routes.pattern = $${ctx:queryId} # default route appender.test-query-routing.routes.test-route-default.type = Route appender.test-query-routing.routes.test-route-default.key = $${ctx:queryId} appender.test-query-routing.routes.test-route-default.app.type = NullAppender appender.test-query-routing.routes.test-route-default.app.name = test-null-appender # queryId based route appender.test-query-routing.routes.test-route-mdc.type = Route appender.test-query-routing.routes.test-route-mdc.name = test-query-routing appender.test-query-routing.routes.test-route-mdc.app.type = RandomAccessFile appender.test-query-routing.routes.test-route-mdc.app.name = test-query-file-appender appender.test-query-routing.routes.test-route-mdc.app.fileName = ${sys:hive.log.dir}/${ctx:sessionId}/${ctx:queryId}.test appender.test-query-routing.routes.test-route-mdc.app.layout.type = PatternLayout appender.test-query-routing.routes.test-route-mdc.app.layout.pattern = %d{ISO8601} %5p %c{2}: %m%n appender.test-query-routing.routes.test-route-mdc.app.filter.type = TestFilter * @param conf the configuration for HiveServer2 instance + */ + public static void registerRoutingAppenderIfInTest(org.apache.hadoop.conf.Configuration conf) { + if (!conf.getBoolean(HiveConf.ConfVars.HIVE_IN_TEST.varname, + HiveConf.ConfVars.HIVE_IN_TEST.defaultBoolVal)) { + // If not in test mode, then do not create the appender + return; + } + + String logLocation = + HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION); + + // Create test-null-appender to drop events without queryId + PluginEntry nullAppenderEntry = new PluginEntry(); + nullAppenderEntry.setClassName(NullAppender.class.getName()); + PluginType nullAppenderType = + new PluginType(nullAppenderEntry, NullAppender.class, "appender"); + Node nullAppenderChildNode = new Node(null, "test-null-appender", nullAppenderType); + + // Create the default route, where events without a queryId go + PluginEntry defaultRouteEntry = new PluginEntry(); + defaultRouteEntry.setClassName(Route.class.getName()); + PluginType defaultRouteType = new PluginType(defaultRouteEntry, Route.class, ""); + Node defaultRouteNode = new Node(null, "test-route-default", defaultRouteType); + // Add the test-null-appender to the default route + defaultRouteNode.getChildren().add(nullAppenderChildNode); + + // Create queryId based route + PluginEntry queryIdRouteEntry = new PluginEntry(); + queryIdRouteEntry.setClassName(Route.class.getName()); + PluginType queryIdRouteType = new PluginType(queryIdRouteEntry, Route.class, ""); + Node queryIdRouteNode = new Node(null, "test-route-mdc", queryIdRouteType); + + // Create the queryId appender for the queryId route + PluginEntry queryIdAppenderEntry = new PluginEntry(); + queryIdAppenderEntry.setClassName(RandomAccessFileAppender.class.getName()); + PluginType queryIdAppenderType = + new PluginType(queryIdAppenderEntry, + RandomAccessFileAppender.class, "appender"); + Node queryIdAppenderNode = + new Node(queryIdRouteNode, "test-query-file-appender", queryIdAppenderType); + queryIdAppenderNode.getAttributes().put("fileName", logLocation + + "/${ctx:sessionId}/${ctx:queryId}.test"); + queryIdAppenderNode.getAttributes().put("name",
"test-query-file-appender"); + // Add the queryId appender to the queryId based route + queryIdRouteNode.getChildren().add(queryIdAppenderNode); + + // Create the filter for the queryId appender + PluginEntry filterEntry = new PluginEntry(); + filterEntry.setClassName(TestFilter.class.getName()); + PluginType filterType = + new PluginType(filterEntry, TestFilter.class, ""); + Node filterNode = new Node(queryIdAppenderNode, "test-filter", filterType); + // Add the filter to the queryId appender + queryIdAppenderNode.getChildren().add(filterNode); + + // Create the layout for the queryId appender + PluginEntry layoutEntry = new PluginEntry(); + layoutEntry.setClassName(PatternLayout.class.getName()); + PluginType layoutType = + new PluginType(layoutEntry, PatternLayout.class, ""); + Node layoutNode = new Node(queryIdAppenderNode, "PatternLayout", layoutType); + layoutNode.getAttributes().put("pattern", LogDivertAppender.nonVerboseLayout); + // Add the layout to the queryId appender + queryIdAppenderNode.getChildren().add(layoutNode); + + // Create the route objects based on the Nodes + Route defaultRoute = Route.createRoute(null, "${ctx:queryId}", defaultRouteNode); + Route mdcRoute = Route.createRoute(null, null, queryIdRouteNode); + // Create the routes group + Routes routes = Routes.createRoutes("${ctx:queryId}", defaultRoute, mdcRoute); + + LoggerContext context = (LoggerContext)LogManager.getContext(false); + Configuration configuration = context.getConfiguration(); + + // Create the appender + RoutingAppender routingAppender = RoutingAppender.createAppender("test-query-routing", + "true", + routes, + configuration, + null, + null, + null); + + LoggerConfig loggerConfig = configuration.getRootLogger(); + loggerConfig.addAppender(routingAppender, null, null); + context.updateLoggers(); + routingAppender.start(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java index c37a633..43986c5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java +++ ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java @@ -37,12 +37,20 @@ private final String operationName; private final LogFile logFile; + // If in test mode then the LogDivertAppenderForTest created an extra log file containing only + // the output needed for the qfile results. + private final LogFile testLogFile; + // True if we are running test and the extra test file should be used when the logs are + // requested. Also in test mode we do not close the logs in the end, so it in case of a + // failure both the original operation log, and the test log is accessible too. 
+ private final boolean isInTest; private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN; public enum LoggingLevel { NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN } + public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFoundException { operationName = name; logFile = new LogFile(file); @@ -51,6 +59,16 @@ public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFou String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL); opLoggingLevel = getLoggingLevel(logLevel); } + + // If in test mode, create a test log file which will contain only the logs supposed to + // be written to the qtest output + if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) { + testLogFile = new LogFile(new File(file.getAbsolutePath() + ".test")); + isInTest = true; + } else { + testLogFile = null; + isInTest = false; + } } public static LoggingLevel getLoggingLevel (String mode) { @@ -79,15 +97,25 @@ public LoggingLevel getOpLoggingLevel() { * @throws java.sql.SQLException */ public List readOperationLog(boolean isFetchFirst, long maxRows) - throws SQLException{ - return logFile.read(isFetchFirst, maxRows); + throws SQLException { + if (isInTest) { + return testLogFile.read(isFetchFirst, maxRows); + } else { + return logFile.read(isFetchFirst, maxRows); + } } /** * Close this OperationLog when operation is closed. The log file will be removed. */ public void close() { - logFile.remove(); + if (isInTest) { + // In test mode just close the log files; do not remove them. + logFile.close(false); + testLogFile.close(false); + } else { + logFile.close(true); + } } /** @@ -113,12 +141,16 @@ public void close() { return readResults(maxRows); } - synchronized void remove() { + /** + * Close the log file, and remove it if specified. + * @param removeLog If true, remove the log file + */ + synchronized void close(boolean removeLog) { try { if (in != null) { in.close(); } - if (!isRemoved) { + if (!isRemoved && removeLog) { FileUtils.forceDelete(file); isRemoved = true; } @@ -135,15 +167,21 @@ private void resetIn() { } private List readResults(long nLines) throws SQLException { + List logs = new ArrayList(); if (in == null) { try { in = new BufferedReader(new InputStreamReader(new FileInputStream(file))); + // Add the name of the log file in an extra log line, so it is easier to find + // the original log if there is a test error + if (isInTest) { + logs.add("Reading log file: " + file); + nLines--; + } } catch (FileNotFoundException e) { - return new ArrayList(); + return logs; } } - List logs = new ArrayList(); String line = ""; // if nLines <= 0, read all lines in log file.
for (int i = 0; i < nLines || nLines <= 0; i++) { diff --git ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out index 385f9b7..2c00f51 100644 --- ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out +++ ql/src/test/results/clientpositive/beeline/drop_with_concurrency.q.out @@ -1,64 +1,20 @@ ->>> !run !!{qFileDirectory}!!/drop_with_concurrency.q ->>> set hive.lock.numretries=1; -No rows affected ->>> set hive.lock.sleep.between.retries=1; -No rows affected ->>> set hive.support.concurrency=true; -No rows affected ->>> set hive.lock.manager=org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager; -No rows affected ->>> ->>> drop table if exists drop_with_concurrency_1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): drop table if exists drop_with_concurrency_1 -ERROR : PREHOOK: query: drop table if exists drop_with_concurrency_1 -ERROR : PREHOOK: type: DROPTABLE -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: drop table if exists drop_with_concurrency_1 -ERROR : POSTHOOK: type: DROPTABLE -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query drop table if exists drop_with_concurrency_1 -No rows affected ->>> create table drop_with_concurrency_1 (c1 int); -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): create table drop_with_concurrency_1 (c1 int) -ERROR : PREHOOK: query: create table drop_with_concurrency_1 (c1 int) -ERROR : PREHOOK: type: CREATETABLE -ERROR : PREHOOK: Output: database:drop_with_concurrency -ERROR : PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: create table drop_with_concurrency_1 (c1 int) -ERROR : POSTHOOK: type: CREATETABLE -ERROR : POSTHOOK: Output: database:drop_with_concurrency -ERROR : POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query create table drop_with_concurrency_1 (c1 int) -No rows affected ->>> drop table drop_with_concurrency_1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : Executing commandqueryId=(!!{queryId}!!): drop table drop_with_concurrency_1 -ERROR : PREHOOK: query: drop table drop_with_concurrency_1 -ERROR : PREHOOK: type: DROPTABLE -ERROR : PREHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 -ERROR : PREHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: drop table drop_with_concurrency_1 -ERROR : POSTHOOK: type: DROPTABLE -ERROR : POSTHOOK: Input: drop_with_concurrency@drop_with_concurrency_1 -ERROR : POSTHOOK: Output: drop_with_concurrency@drop_with_concurrency_1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query drop table drop_with_concurrency_1 -No rows affected ->>> !record +PREHOOK: query: drop table if exists drop_with_concurrency_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists drop_with_concurrency_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table drop_with_concurrency_1 (c1 int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@drop_with_concurrency_1 +POSTHOOK: query: create table drop_with_concurrency_1 (c1 int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@drop_with_concurrency_1 +PREHOOK: query: drop table drop_with_concurrency_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@drop_with_concurrency_1 +PREHOOK: Output: default@drop_with_concurrency_1 +POSTHOOK: query: drop table drop_with_concurrency_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@drop_with_concurrency_1 +POSTHOOK: Output: default@drop_with_concurrency_1 diff --git ql/src/test/results/clientpositive/beeline/escape_comments.q.out ql/src/test/results/clientpositive/beeline/escape_comments.q.out index abc0fee..07fef57 100644 --- ql/src/test/results/clientpositive/beeline/escape_comments.q.out +++ ql/src/test/results/clientpositive/beeline/escape_comments.q.out @@ -1,407 +1,217 @@ ->>> !run !!{qFileDirectory}!!/escape_comments.q ->>> create database escape_comments_db comment 'a\nb'; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb' -ERROR : PREHOOK: query: create database escape_comments_db comment 'a\nb' -ERROR : PREHOOK: type: CREATEDATABASE -ERROR : PREHOOK: Output: database:escape_comments_db -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: create database escape_comments_db comment 'a\nb' -ERROR : POSTHOOK: type: CREATEDATABASE -ERROR : POSTHOOK: Output: database:escape_comments_db -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query create database escape_comments_db comment 'a\nb' -No rows affected ->>> use escape_comments_db; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): use escape_comments_db -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : Executing commandqueryId=(!!{queryId}!!): use escape_comments_db -ERROR : PREHOOK: query: use escape_comments_db -ERROR : PREHOOK: type: SWITCHDATABASE -ERROR : PREHOOK: Input: database:escape_comments_db -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: use escape_comments_db -ERROR : POSTHOOK: type: SWITCHDATABASE -ERROR : POSTHOOK: Input: database:escape_comments_db -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query use escape_comments_db -No rows affected ->>> create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb'); -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +PREHOOK: query: create database escape_comments_db comment 'a\nb' +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:escape_comments_db +POSTHOOK: query: create database escape_comments_db comment 'a\nb' +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:escape_comments_db +PREHOOK: query: use escape_comments_db +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:escape_comments_db +POSTHOOK: query: use escape_comments_db +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:escape_comments_db +PREHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:escape_comments_db +PREHOOK: Output: escape_comments_db@escape_comments_tbl1 +POSTHOOK: query: create table escape_comments_tbl1 +(col1 string comment 'a\nb\'\;') comment 'a\nb' partitioned by (p1 string comment 'a\nb') -ERROR : PREHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -ERROR : PREHOOK: type: CREATETABLE -ERROR : PREHOOK: Output: database:escape_comments_db -ERROR : PREHOOK: Output: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -ERROR : POSTHOOK: type: CREATETABLE -ERROR : POSTHOOK: Output: database:escape_comments_db -ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : OK -DEBUG : Shutting down query create table escape_comments_tbl1 -(col1 string comment 'a\nb\'\;') comment 'a\nb' -partitioned by (p1 string comment 'a\nb') -No rows affected ->>> create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:escape_comments_db +POSTHOOK: Output: escape_comments_db@escape_comments_tbl1 +PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col1, type:string, comment:null)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' +PREHOOK: type: CREATEVIEW +PREHOOK: Input: escape_comments_db@escape_comments_tbl1 +PREHOOK: Output: database:escape_comments_db +PREHOOK: Output: escape_comments_db@escape_comments_view1 +POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' as select col1 from escape_comments_tbl1 -ERROR : PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -ERROR : PREHOOK: type: CREATEVIEW -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -ERROR : PREHOOK: Output: database:escape_comments_db -ERROR : PREHOOK: Output: escape_comments_db@escape_comments_view1 -INFO : Starting task [Stage-1:DDL] in serial mode -ERROR : POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -ERROR : POSTHOOK: type: CREATEVIEW -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -ERROR : POSTHOOK: Output: database:escape_comments_db -ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_view1 -ERROR : POSTHOOK: Lineage: escape_comments_view1.col1 SIMPLE [(escape_comments_tbl1)escape_comments_tbl1.FieldSchema(name:col1, type:string, comment:a +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 +POSTHOOK: Output: database:escape_comments_db +POSTHOOK: Output: escape_comments_db@escape_comments_view1 +POSTHOOK: Lineage: escape_comments_view1.col1 SIMPLE [(escape_comments_tbl1)escape_comments_tbl1.FieldSchema(name:col1, type:string, comment:a b';), ] -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb' -as select col1 from escape_comments_tbl1 -No rows affected ->>> create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : Executing commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -ERROR : PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -ERROR : PREHOOK: type: CREATEINDEX -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -ERROR : POSTHOOK: type: CREATEINDEX -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__ -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb' -No rows affected ->>> ->>> describe database extended escape_comments_db; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db -ERROR : PREHOOK: query: describe database extended escape_comments_db -ERROR : PREHOOK: type: DESCDATABASE -ERROR : PREHOOK: Input: database:escape_comments_db -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe database extended escape_comments_db -ERROR : POSTHOOK: type: DESCDATABASE -ERROR : POSTHOOK: Input: database:escape_comments_db -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query describe database extended escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> describe database escape_comments_db; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe database escape_comments_db -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe database escape_comments_db -ERROR : PREHOOK: query: describe database escape_comments_db -ERROR : PREHOOK: type: DESCDATABASE -ERROR : PREHOOK: Input: database:escape_comments_db -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe database escape_comments_db -ERROR : POSTHOOK: type: DESCDATABASE -ERROR : POSTHOOK: Input: database:escape_comments_db -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query describe database escape_comments_db -'db_name','comment','location','owner_name','owner_type','parameters' -'escape_comments_db','a\nb','location/in/test','user','USER','' -1 row selected ->>> show create table escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1 -ERROR : PREHOOK: query: show create table escape_comments_tbl1 -ERROR : PREHOOK: type: SHOW_CREATETABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: show create table escape_comments_tbl1 -ERROR : POSTHOOK: type: SHOW_CREATETABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query show create table escape_comments_tbl1 -'createtab_stmt' -'CREATE TABLE `escape_comments_tbl1`(' -' `col1` string COMMENT 'a\nb\'\;')' -'COMMENT 'a\nb'' -'PARTITIONED BY ( ' -' `p1` string COMMENT 'a\nb')' -'ROW FORMAT SERDE ' -' 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' ' -'STORED AS INPUTFORMAT ' -' 'org.apache.hadoop.mapred.TextInputFormat' ' -'OUTPUTFORMAT ' -' 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'' -'LOCATION' -' '!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1'' -'TBLPROPERTIES (' -' 'transient_lastDdlTime'='!!UNIXTIME!!')' -15 rows selected ->>> describe formatted escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1 -ERROR : PREHOOK: query: describe formatted escape_comments_tbl1 -ERROR : PREHOOK: type: DESCTABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe formatted escape_comments_tbl1 -ERROR : POSTHOOK: type: DESCTABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : OK -DEBUG : Shutting down query describe formatted escape_comments_tbl1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb';' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Location: ','!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1','NULL' -'Table Type: ','MANAGED_TABLE ','NULL' -'Table Parameters:','NULL','NULL' -'','COLUMN_STATS_ACCURATE','{\"BASIC_STATS\":\"true\"}' -'','comment ','a\nb ' -'','numFiles ','0 ' -'','numPartitions ','0 ' -'','numRows ','0 ' -'','rawDataSize ','0 ' -'','totalSize ','0 ' -'','transient_lastDdlTime','!!UNIXTIME!! ' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'Storage Desc Params:','NULL','NULL' -'','serialization.format','1 ' -37 rows selected ->>> describe pretty escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1 -ERROR : PREHOOK: query: describe pretty escape_comments_tbl1 -ERROR : PREHOOK: type: DESCTABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe pretty escape_comments_tbl1 -ERROR : POSTHOOK: type: DESCTABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query describe pretty escape_comments_tbl1 -'col_name','data_type','comment' -'col_name ','data_type ','comment' -'','','' -'col1 ','string ','a' -' ',' ','b';' -'p1 ','string ','a' -' ',' ','b' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'col_name ','data_type ','comment' -'','','' -'p1 ','string ','a' -' ',' ','b' -12 rows selected ->>> describe escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1 -ERROR : PREHOOK: query: describe escape_comments_tbl1 -ERROR : PREHOOK: type: DESCTABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe escape_comments_tbl1 -ERROR : POSTHOOK: type: DESCTABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_tbl1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query describe escape_comments_tbl1 -'col_name','data_type','comment' -'col1','string','a\nb';' -'p1','string','a\nb' -'','NULL','NULL' -'# Partition Information','NULL','NULL' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'p1','string','a\nb' -7 rows selected ->>> show create table escape_comments_view1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_view1 -ERROR : PREHOOK: query: show create table escape_comments_view1 -ERROR : PREHOOK: type: SHOW_CREATETABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_view1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: show create table escape_comments_view1 -ERROR : POSTHOOK: type: SHOW_CREATETABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_view1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query show create table escape_comments_view1 -'createtab_stmt' -'CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`' -1 row selected ->>> describe formatted escape_comments_view1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1 -ERROR : PREHOOK: query: describe formatted escape_comments_view1 -ERROR : PREHOOK: type: DESCTABLE -ERROR : PREHOOK: Input: escape_comments_db@escape_comments_view1 -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: describe formatted escape_comments_view1 -ERROR : POSTHOOK: type: DESCTABLE -ERROR : POSTHOOK: Input: escape_comments_db@escape_comments_view1 -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds -INFO : OK -DEBUG : Shutting down query describe formatted escape_comments_view1 -'col_name','data_type','comment' -'# col_name ','data_type ','comment ' -'','NULL','NULL' -'col1','string','a\nb' -'','NULL','NULL' -'# Detailed Table Information','NULL','NULL' -'Database: ','escape_comments_db ','NULL' -'Owner: ','user ','NULL' -'CreateTime: ','!!TIMESTAMP!!','NULL' -'LastAccessTime: ','UNKNOWN ','NULL' -'Retention: ','0 ','NULL' -'Table Type: ','VIRTUAL_VIEW ','NULL' -'Table Parameters:','NULL','NULL' -'','comment ','a\nb ' -'','transient_lastDdlTime','!!UNIXTIME!! ' -'','NULL','NULL' -'# Storage Information','NULL','NULL' -'SerDe Library: ','null ','NULL' -'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL' -'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL' -'Compressed: ','No ','NULL' -'Num Buckets: ','-1 ','NULL' -'Bucket Columns: ','[] ','NULL' -'Sort Columns: ','[] ','NULL' -'','NULL','NULL' -'# View Information','NULL','NULL' -'View Original Text: ','select col1 from escape_comments_tbl1','NULL' -'View Expanded Text: ','SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`','NULL' -'View Rewrite Enabled:','No ','NULL' -28 rows selected ->>> show formatted index on escape_comments_tbl1; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:idx_name, type:string, comment:from deserializer), FieldSchema(name:tab_name, type:string, comment:from deserializer), FieldSchema(name:col_names, type:string, comment:from deserializer), FieldSchema(name:idx_tab_name, type:string, comment:from deserializer), FieldSchema(name:idx_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : Executing commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1 -ERROR : PREHOOK: query: show formatted index on escape_comments_tbl1 -ERROR : PREHOOK: type: SHOWINDEXES -INFO : Starting task [Stage-0:DDL] in serial mode -ERROR : POSTHOOK: query: show formatted index on escape_comments_tbl1 -ERROR : POSTHOOK: type: SHOWINDEXES -INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds -INFO : OK -DEBUG : Shutting down query show formatted index on escape_comments_tbl1 -'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment' -'idx_name ','tab_name ','col_names ','idx_tab_name ','idx_type ','comment ' -'','NULL','NULL','NULL','NULL','NULL' -'','NULL','NULL','NULL','NULL','NULL' -'index2 ','escape_comments_tbl1','col1 ','escape_comments_db__escape_comments_tbl1_index2__','compact ','a\nb ' -4 rows selected ->>> ->>> drop database escape_comments_db cascade; -DEBUG : INFO : Compiling commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade -INFO : Semantic Analysis Completed -INFO : Returning Hive schema: Schema(fieldSchemas:null, properties:null) -INFO : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
-INFO : Executing commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade
-ERROR : PREHOOK: query: drop database escape_comments_db cascade
-ERROR : PREHOOK: type: DROPDATABASE
-ERROR : PREHOOK: Input: database:escape_comments_db
-ERROR : PREHOOK: Output: database:escape_comments_db
-ERROR : PREHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
-ERROR : PREHOOK: Output: escape_comments_db@escape_comments_tbl1
-ERROR : PREHOOK: Output: escape_comments_db@escape_comments_view1
-INFO : Starting task [Stage-0:DDL] in serial mode
-ERROR : POSTHOOK: query: drop database escape_comments_db cascade
-ERROR : POSTHOOK: type: DROPDATABASE
-ERROR : POSTHOOK: Input: database:escape_comments_db
-ERROR : POSTHOOK: Output: database:escape_comments_db
-ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
-ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
-ERROR : POSTHOOK: Output: escape_comments_db@escape_comments_view1
-INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO : OK
-DEBUG : Shutting down query drop database escape_comments_db cascade
-No rows affected
->>> !record
+PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+PREHOOK: type: CREATEINDEX
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+POSTHOOK: type: CREATEINDEX
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+PREHOOK: query: describe database extended escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+POSTHOOK: query: describe database extended escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+escape_comments_db a\nb location/in/test user USER
+PREHOOK: query: describe database escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+POSTHOOK: query: describe database escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+escape_comments_db a\nb location/in/test user USER
+PREHOOK: query: show create table escape_comments_tbl1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: show create table escape_comments_tbl1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+CREATE TABLE `escape_comments_tbl1`(
+  `col1` string COMMENT 'a\nb\'\;')
+COMMENT 'a\nb'
+PARTITIONED BY (
+  `p1` string COMMENT 'a\nb')
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: describe formatted escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe formatted escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+# col_name data_type comment
+ NULL NULL
+col1 string a\nb';
+ NULL NULL
+# Partition Information NULL NULL
+# col_name data_type comment
+ NULL NULL
+p1 string a\nb
+ NULL NULL
+# Detailed Table Information NULL NULL
+Database: escape_comments_db NULL
+Owner: user NULL
+#### A masked pattern was here ####
+LastAccessTime: UNKNOWN NULL
+Retention: 0 NULL
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE NULL
+Table Parameters: NULL NULL
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+ comment a\nb
+ numFiles 0
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
+#### A masked pattern was here ####
+ NULL NULL
+# Storage Information NULL NULL
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe NULL
+InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL
+Compressed: No NULL
+Num Buckets: -1 NULL
+Bucket Columns: [] NULL
+Sort Columns: [] NULL
+Storage Desc Params: NULL NULL
+ serialization.format 1
+PREHOOK: query: describe pretty escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe pretty escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+col_name data_type comment
+
+col1 string a
+ b';
+p1 string a
+ b
+ NULL NULL
+# Partition Information NULL NULL
+col_name data_type comment
+
+p1 string a
+ b
+PREHOOK: query: describe escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: query: describe escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+col1 string a\nb';
+p1 string a\nb
+ NULL NULL
+# Partition Information NULL NULL
+# col_name data_type comment
+ NULL NULL
+p1 string a\nb
+PREHOOK: query: show create table escape_comments_view1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+POSTHOOK: query: show create table escape_comments_view1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`
+PREHOOK: query: describe formatted escape_comments_view1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+POSTHOOK: query: describe formatted escape_comments_view1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+# col_name data_type comment
+ NULL NULL
+col1 string a\nb
+ NULL NULL
+# Detailed Table Information NULL NULL
+Database: escape_comments_db NULL
+Owner: user NULL
+#### A masked pattern was here ####
+LastAccessTime: UNKNOWN NULL
+Retention: 0 NULL
+Table Type: VIRTUAL_VIEW NULL
+Table Parameters: NULL NULL
+ comment a\nb
+#### A masked pattern was here ####
+ NULL NULL
+# Storage Information NULL NULL
+SerDe Library: null NULL
+InputFormat: org.apache.hadoop.mapred.TextInputFormat NULL
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat NULL
+Compressed: No NULL
+Num Buckets: -1 NULL
+Bucket Columns: [] NULL
+Sort Columns: [] NULL
+ NULL NULL
+# View Information NULL NULL
+View Original Text: select col1 from escape_comments_tbl1 NULL
+View Expanded Text: SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1` NULL
+View Rewrite Enabled: No NULL
+PREHOOK: query: show formatted index on escape_comments_tbl1
+PREHOOK: type: SHOWINDEXES
+POSTHOOK: query: show formatted index on escape_comments_tbl1
+POSTHOOK: type: SHOWINDEXES
+idx_name tab_name col_names idx_tab_name idx_type comment
+ NULL NULL NULL NULL NULL
+ NULL NULL NULL NULL NULL
+index2 escape_comments_tbl1 col1 escape_comments_db__escape_comments_tbl1_index2__ compact a\nb
+PREHOOK: query: drop database escape_comments_db cascade
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:escape_comments_db
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+PREHOOK: Output: escape_comments_db@escape_comments_tbl1
+PREHOOK: Output: escape_comments_db@escape_comments_view1
+POSTHOOK: query: drop database escape_comments_db cascade
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:escape_comments_db
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: escape_comments_db@escape_comments_view1
diff --git ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
new file mode 100644
index 0000000..b6fa03f
--- /dev/null
+++ ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
@@ -0,0 +1,251 @@
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 'a' (type: string), 100 (type: int)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a 100
+PREHOOK: query: explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2 (type: int)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2
+PREHOOK: query: explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_table
+            Row Limit Per Split: 1
+            Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: array('a','b') (type: array<string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 'a' (type: string), 100 (type: int)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 89 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a 100
+PREHOOK: query: explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2 (type: int)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2
+PREHOOK: query: explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: array('a','b') (type: array<string>)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            UDTF Operator
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              function name: explode
+              ListSink
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: explain
+select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 5 (type: int), 3 (type: int)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+5 3
diff --git service/src/java/org/apache/hive/service/cli/operation/OperationManager.java service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
index f62ee4e..ac64ab2 100644
--- service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.log.LogDivertAppender;
+import org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest;
 import org.apache.hadoop.hive.ql.session.OperationLog;
 import org.apache.hive.service.AbstractService;
 import org.apache.hive.service.cli.FetchOrientation;
@@ -71,6 +72,7 @@ public OperationManager() {
   @Override
   public synchronized void init(HiveConf hiveConf) {
     LogDivertAppender.registerRoutingAppender(hiveConf);
+    LogDivertAppenderForTest.registerRoutingAppenderIfInTest(hiveConf);

     if (hiveConf.isWebUiQueryInfoCacheEnabled()) {
       historicSqlOperations = new SQLOperationDisplayCache(
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 418f453..95a6670 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -763,7 +763,8 @@ public void close() throws HiveSQLException {
   }

   private void cleanupSessionLogDir() {
-    if (isOperationLogEnabled) {
+    // In case of test, do not remove the log directory
+    if (isOperationLogEnabled && !sessionConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
       try {
         FileUtils.forceDelete(sessionLogDir);
         LOG.info("Operation log session directory is deleted: "
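
The last two hunks share one pattern: test-only behaviour on the server side is gated on HiveConf.ConfVars.HIVE_IN_TEST, so production HiveServer2 instances never register the extra appender and never keep operation log directories around. The sketch below is illustrative only, assuming just the HiveConf calls visible in this patch; the real LogDivertAppenderForTest is not part of this excerpt, and the class and comments here are hypothetical:

// Illustrative sketch of the HIVE_IN_TEST guard pattern used above;
// not the actual contents of LogDivertAppenderForTest.
import org.apache.hadoop.hive.conf.HiveConf;

public final class TestModeGuardSketch {

  private TestModeGuardSketch() {
  }

  // Hypothetical helper with the same shape as
  // LogDivertAppenderForTest.registerRoutingAppenderIfInTest(HiveConf):
  // a no-op unless hive.in.test is set.
  public static void registerRoutingAppenderIfInTest(HiveConf conf) {
    if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
      return; // normal (non-test) deployments take this early exit
    }
    // In test runs, this is where the extra log4j2 routing appender that
    // captures the operation log for the golden-file comparison would be
    // registered.
  }
}

The same check explains the HiveSessionImpl change: with hive.in.test set, the session's operation log directory survives close(), so the QFile driver can still read and diff it after the query finishes.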