diff --git beeline/src/java/org/apache/hive/beeline/util/QFileClient.java beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
index 81f1b0e..6bcdfd2 100644
--- beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
+++ beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
@@ -21,12 +21,15 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.regex.Pattern;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.hive.common.util.StreamPrinter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -49,6 +52,8 @@
   private File expectedDirectory;
   private final File scratchDirectory;
   private final File warehouseDirectory;
+  private final File initScript;
+  private final File cleanupScript;
 
   private File testDataDirectory;
   private File testScriptDirectory;
@@ -73,11 +78,13 @@
 
   public QFileClient(HiveConf hiveConf, String hiveRootDirectory, String qFileDirectory,
       String outputDirectory,
-      String expectedDirectory) {
+      String expectedDirectory, String initScript, String cleanupScript) {
     this.hiveRootDirectory = new File(hiveRootDirectory);
     this.qFileDirectory = new File(qFileDirectory);
     this.outputDirectory = new File(outputDirectory);
     this.expectedDirectory = new File(expectedDirectory);
+    this.initScript = new File(initScript);
+    this.cleanupScript = new File(cleanupScript);
     this.scratchDirectory = new File(hiveConf.getVar(ConfVars.SCRATCHDIR));
     this.warehouseDirectory = new File(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE));
   }
@@ -110,6 +117,9 @@
   void initFilterSet() {
     String timePattern = "(Mon|Tue|Wed|Thu|Fri|Sat|Sun) "
        + "(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) "
        + "\\d{2} \\d{2}:\\d{2}:\\d{2} \\w+ 20\\d{2}";
+    // Pattern to remove the timestamp and other infrastructural info from the out file
+    String logPattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d*\\s+\\S+\\s+\\[" +
+        ".*\\]\\s+\\S+:\\s+";
     String unixTimePattern = "\\D" + currentTimePrefix + "\\d{6}\\D";
     String unixTimeMillisPattern = "\\D" + currentTimePrefix + "\\d{9}\\D";
@@ -119,12 +129,15 @@
        + "|SCR|SEL|STATS|TS|UDTF|UNION)_\\d+\"";
 
     filterSet = new RegexFilterSet()
+        .addFilter(logPattern,"")
+        .addFilter("Getting log thread is interrupted, since query is done!\n","")
        .addFilter(scratchDirectory.toString() + "[\\w\\-/]+", "!!{hive.exec.scratchdir}!!")
        .addFilter(warehouseDirectory.toString(), "!!{hive.metastore.warehouse.dir}!!")
        .addFilter(expectedDirectory.toString(), "!!{expectedDirectory}!!")
        .addFilter(outputDirectory.toString(), "!!{outputDirectory}!!")
        .addFilter(qFileDirectory.toString(), "!!{qFileDirectory}!!")
        .addFilter(hiveRootDirectory.toString(), "!!{hive.root}!!")
+        .addFilter("\\(queryId=[^\\)]*\\)","queryId=(!!{queryId}!!)")
        .addFilter("file:/\\w\\S+", "file:/!!ELIDED!!")
        .addFilter("pfile:/\\w\\S+", "pfile:/!!ELIDED!!")
        .addFilter("hdfs:/\\w\\S+", "hdfs:/!!ELIDED!!")
@@ -134,6 +147,7 @@
        .addFilter("(\\D)" + currentTimePrefix + "\\d{9}(\\D)", "$1!!UNIXTIMEMILLIS!!$2")
        .addFilter(userName, "!!{user.name}!!")
        .addFilter(operatorPattern, "\"$1_!!ELIDED!!\"")
+        .addFilter("Time taken: [0-9\\.]* seconds", "Time taken: !!ELIDED!! seconds")
        ;
   };
@@ -219,7 +233,7 @@ private void setUp() {
       "USE `" + testname + "`;",
       "set test.data.dir=" + testDataDirectory + ";",
       "set test.script.dir=" + testScriptDirectory + ";",
-      "!run " + testScriptDirectory + "/q_test_init.sql",
+      "!run " + initScript,
     });
   }
 
@@ -228,6 +242,7 @@ private void tearDown() {
       "!set outputformat table",
       "USE default;",
       "DROP DATABASE IF EXISTS `" + testname + "` CASCADE;",
+      "!run " + cleanupScript,
     });
   }
 
@@ -295,12 +310,61 @@ public boolean hasExpectedResults() {
     return expectedFile.exists();
   }
 
-  public boolean compareResults() throws IOException {
+  public boolean compareResults() throws IOException, InterruptedException {
     if (!expectedFile.exists()) {
       LOG.error("Expected results file does not exist: " + expectedFile);
       return false;
     }
-    return FileUtils.contentEquals(expectedFile, outputFile);
+    return executeDiff();
+  }
+
+  private boolean executeDiff() throws IOException, InterruptedException {
+    ArrayList<String> diffCommandArgs = new ArrayList<String>();
+    diffCommandArgs.add("diff");
+
+    // Text file comparison
+    diffCommandArgs.add("-a");
+
+    if (Shell.WINDOWS) {
+      // Ignore changes in the amount of white space
+      diffCommandArgs.add("-b");
+
+      // Files created on Windows machines have different line endings
+      // than files created on Unix/Linux. Windows uses carriage return and line feed
+      // ("\r\n") as a line ending, whereas Unix uses just line feed ("\n").
+      // Also StringBuilder.toString(), Stream to String conversions adds extra
+      // spaces at the end of the line.
+      diffCommandArgs.add("--strip-trailing-cr"); // Strip trailing carriage return on input
+      diffCommandArgs.add("-B"); // Ignore changes whose lines are all blank
+    }
+
+    // Add files to compare to the arguments list
+    diffCommandArgs.add(getQuotedString(expectedFile));
+    diffCommandArgs.add(getQuotedString(outputFile));
+
+    System.out.println("Running: " + org.apache.commons.lang.StringUtils.join(diffCommandArgs,
+        ' '));
+    Process executor = Runtime.getRuntime().exec(diffCommandArgs.toArray(
+        new String[diffCommandArgs.size()]));
+
+    StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err);
+    StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
+
+    outPrinter.start();
+    errPrinter.start();
+
+    int result = executor.waitFor();
+
+    outPrinter.join();
+    errPrinter.join();
+
+    executor.waitFor();
+
+    return (result == 0);
+  }
+
+  private static String getQuotedString(File file) {
+    return Shell.WINDOWS ?
+        String.format("\"%s\"", file.getAbsolutePath()) : file.getAbsolutePath();
   }
 
   public void overwriteResults() {
diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java
similarity index 94%
rename from itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java
rename to itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java
index cb276e6..24eeb9d 100644
--- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/DisabledTestBeeLineDriver.java
+++ itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestBeeLineDriver.java
@@ -31,7 +31,7 @@
 import org.junit.runners.Parameterized.Parameters;
 
 @RunWith(Parameterized.class)
-public class DisabledTestBeeLineDriver {
+public class TestBeeLineDriver {
 
   static CliAdapter adapter = new CliConfigs.BeeLineConfig().getCliAdapter();
 
@@ -49,7 +49,7 @@
   private String name;
   private File qfile;
 
-  public DisabledTestBeeLineDriver(String name, File qfile) {
+  public TestBeeLineDriver(String name, File qfile) {
     this.name = name;
     this.qfile = qfile;
   }
diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index d344464..467a2ed 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -718,163 +718,7 @@
 encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_with_trash.q \
   encryption_ctas.q
 
-beeline.positive.exclude=add_part_exist.q,\
-  alter1.q,\
-  alter2.q,\
-  alter4.q,\
-  alter5.q,\
-  alter_rename_partition.q,\
-  alter_rename_partition_authorization.q,\
-  archive.q,\
-  archive_corrupt.q,\
-  archive_mr_1806.q,\
-  archive_multi.q,\
-  archive_multi_mr_1806.q,\
-  authorization_1.q,\
-  authorization_2.q,\
-  authorization_4.q,\
-  authorization_5.q,\
-  authorization_6.q,\
-  authorization_7.q,\
-  ba_table1.q,\
-  ba_table2.q,\
-  ba_table3.q,\
-  ba_table_udfs.q,\
-  binary_table_bincolserde.q,\
-  binary_table_colserde.q,\
-  cluster.q,\
-  columnarserde_create_shortcut.q,\
-  combine2.q,\
-  constant_prop.q,\
-  create_nested_type.q,\
-  create_or_replace_view.q,\
-  create_struct_table.q,\
-  create_union_table.q,\
-  database.q,\
-  database_location.q,\
-  database_properties.q,\
-  describe_database_json.q,\
-  drop_database_removes_partition_dirs.q,\
-  escape1.q,\
-  escape2.q,\
-  exim_00_nonpart_empty.q,\
-  exim_01_nonpart.q,\
-  exim_02_00_part_empty.q,\
-  exim_02_part.q,\
-  exim_03_nonpart_over_compat.q,\
-  exim_04_all_part.q,\
-  exim_04_evolved_parts.q,\
-  exim_05_some_part.q,\
-  exim_06_one_part.q,\
-  exim_07_all_part_over_nonoverlap.q,\
-  exim_08_nonpart_rename.q,\
-  exim_09_part_spec_nonoverlap.q,\
-  exim_10_external_managed.q,\
-  exim_11_managed_external.q,\
-  exim_12_external_location.q,\
-  exim_13_managed_location.q,\
-  exim_14_managed_location_over_existing.q,\
-  exim_15_external_part.q,\
-  exim_16_part_external.q,\
-  exim_17_part_managed.q,\
-  exim_18_part_external.q,\
-  exim_19_00_part_external_location.q,\
-  exim_19_part_external_location.q,\
-  exim_20_part_managed_location.q,\
-  exim_21_export_authsuccess.q,\
-  exim_22_import_exist_authsuccess.q,\
-  exim_23_import_part_authsuccess.q,\
-  exim_24_import_nonexist_authsuccess.q,\
-  global_limit.q,\
-  groupby_complex_types.q,\
-  groupby_complex_types_multi_single_reducer.q,\
-  index_auth.q,\
-  index_auto.q,\
-  index_auto_empty.q,\
-  index_bitmap.q,\
-  index_bitmap1.q,\
-  index_bitmap2.q,\
-  index_bitmap3.q,\
-  index_bitmap_auto.q,\
-  index_bitmap_rc.q,\
-  index_compact.q,\
-  index_compact_1.q,\
-  index_compact_2.q,\
-  index_compact_3.q,\
-  index_stale_partitioned.q,\
-  init_file.q,\
-  input16.q,\
-  input16_cc.q,\
-  input46.q,\
-  input_columnarserde.q,\
-  input_dynamicserde.q,\
-  input_lazyserde.q,\
-  input_testxpath3.q,\
-  input_testxpath4.q,\
-  insert2_overwrite_partitions.q,\
-  insertexternal1.q,\
-  join_thrift.q,\
-  lateral_view.q,\
-  load_binary_data.q,\
-  load_exist_part_authsuccess.q,\
-  load_nonpart_authsuccess.q,\
-  load_part_authsuccess.q,\
-  loadpart_err.q,\
-  lock1.q,\
-  lock2.q,\
-  lock3.q,\
-  lock4.q,\
-  merge_dynamic_partition.q,\
-  multi_insert.q,\
-  multi_insert_move_tasks_share_dependencies.q,\
-  null_column.q,\
-  ppd_clusterby.q,\
-  query_with_semi.q,\
-  rename_column.q,\
-  sample6.q,\
-  sample_islocalmode_hook.q,\
-  set_processor_namespaces.q,\
-  show_tables.q,\
-  source.q,\
-  split_sample.q,\
-  str_to_map.q,\
-  transform1.q,\
-  udaf_collect_set.q,\
-  udaf_context_ngrams.q,\
-  udaf_histogram_numeric.q,\
-  udaf_ngrams.q,\
-  udaf_percentile_approx.q,\
-  udf_array.q,\
-  udf_bitmap_and.q,\
-  udf_bitmap_or.q,\
-  udf_explode.q,\
-  udf_format_number.q,\
-  udf_map.q,\
-  udf_map_keys.q,\
-  udf_map_values.q,\
-  udf_mask.q,\
-  udf_mask_first_n.q,\
-  udf_mask_hash.q,\
-  udf_mask_last_n.q,\
-  udf_mask_show_first_n.q,\
-  udf_mask_show_last_n.q,\
-  udf_max.q,\
-  udf_min.q,\
-  udf_named_struct.q,\
-  udf_percentile.q,\
-  udf_printf.q,\
-  udf_sentences.q,\
-  udf_sort_array.q,\
-  udf_sort_array_by.q,\
-  udf_split.q,\
-  udf_struct.q,\
-  udf_substr.q,\
-  udf_translate.q,\
-  udf_union.q,\
-  udf_xpath.q,\
-  udtf_stack.q,\
-  view.q,\
-  virtual_column.q
+beeline.positive.include=escape_comments.q
 
 minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
diff --git itests/util/pom.xml itests/util/pom.xml
index 6d93dc1..3740af8 100644
--- itests/util/pom.xml
+++ itests/util/pom.xml
@@ -71,6 +71,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-beeline</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-hbase-handler</artifactId>
       <version>${project.version}</version>
diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index af8ec67..67064b8 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -406,19 +406,17 @@ public ContribNegativeCliConfig() {
 
   public static class BeeLineConfig extends AbstractCliConfig {
     public BeeLineConfig() {
-      // FIXME: beeline is disabled...
-      super(null);
-      // super(CoreBeeLineDriver.class);
+      super(CoreBeeLineDriver.class);
       try {
         setQueryDir("ql/src/test/queries/clientpositive");
 
-        excludesFrom(testConfigProps, "beeline.positive.exclude");
+        includesFrom(testConfigProps, "beeline.positive.include");
 
-        setResultsDir("ql/src/test/results/clientpositive");
+        setResultsDir("ql/src/test/results/clientpositive/beeline");
         setLogDir("itests/qtest/target/qfile-results/beelinepositive");
 
-        setInitScript("q_test_init.sql");
-        setCleanupScript("q_test_cleanup.sql");
+        setInitScript("q_test_init_src.sql");
+        setCleanupScript("q_test_cleanup_src.sql");
 
         setHiveConfDir("");
         setClusterType(MiniClusterType.none);
@@ -434,8 +432,6 @@ public AccumuloCliConfig() {
       try {
         setQueryDir("accumulo-handler/src/test/queries/positive");
 
-        excludesFrom(testConfigProps, "beeline.positive.exclude");
-
         setResultsDir("accumulo-handler/src/test/results/positive");
         setLogDir("itests/qtest/target/qfile-results/accumulo-handler/positive");
diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index e5144e3..cdd0d67 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -16,104 +16,87 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hive.cli.control;
 
-//beeline is excluded by default
-//AFAIK contains broken tests
-//and produces compile errors...i'll comment out this whole class for now...
-/*
 import static org.junit.Assert.fail;
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hive.beeline.util.QFileClient;
-import org.apache.hive.service.server.HiveServer2;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-// HIVE-14444: i've dropped this: @RunWith(ConcurrentTestRunner.class)
+
+import java.util.HashMap;
+
 public class CoreBeeLineDriver extends CliAdapter {
   private final String hiveRootDirectory = AbstractCliConfig.HIVE_ROOT;
   private final String queryDirectory;
   private final String logDirectory;
   private final String resultsDirectory;
+  private final String initScript;
+  private final String cleanupScript;
   private boolean overwrite = false;
-  private static String scratchDirectory;
-  private static QTestUtil.QTestSetup miniZKCluster = null;
-
-  private static HiveServer2 hiveServer2;
+  private MiniHS2 miniHS2;
+//  private static QTestUtil.QTestSetup miniZKCluster = null;
 
   public CoreBeeLineDriver(AbstractCliConfig testCliConfig) {
     super(testCliConfig);
     queryDirectory = testCliConfig.getQueryDirectory();
     logDirectory = testCliConfig.getLogDir();
     resultsDirectory = testCliConfig.getResultsDir();
+    initScript = testCliConfig.getInitScript();
+    cleanupScript = testCliConfig.getCleanupScript();
   }
 
   @Override
   @BeforeClass
   public void beforeClass() throws Exception {
-    HiveConf hiveConf = new HiveConf();
-    hiveConf.logVars(System.err);
-    System.err.flush();
-
-    scratchDirectory = hiveConf.getVar(SCRATCHDIR);
-
     String testOutputOverwrite = System.getProperty("test.output.overwrite");
     if (testOutputOverwrite != null && "true".equalsIgnoreCase(testOutputOverwrite)) {
       overwrite = true;
     }
 
-    miniZKCluster = new QTestUtil.QTestSetup();
-    miniZKCluster.preTest(hiveConf);
-
-    System.setProperty("hive.zookeeper.quorum",
-        hiveConf.get("hive.zookeeper.quorum"));
-    System.setProperty("hive.zookeeper.client.port",
-        hiveConf.get("hive.zookeeper.client.port"));
-
     String disableserver = System.getProperty("test.service.disable.server");
     if (null != disableserver && disableserver.equalsIgnoreCase("true")) {
-      System.err.println("test.service.disable.server=true "
-          + "Skipping HiveServer2 initialization!");
+      System.err.println("test.service.disable.server=true Skipping HiveServer2 initialization!");
       return;
     }
 
-    hiveServer2 = new HiveServer2();
-    hiveServer2.init(hiveConf);
-    System.err.println("Starting HiveServer2...");
-    hiveServer2.start();
-    Thread.sleep(5000);
+    HiveConf hiveConf = new HiveConf();
+    // We do not need Zookeeper at the moment
+    hiveConf.set(HiveConf.ConfVars.HIVE_LOCK_MANAGER.varname,
+        "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
+
+    // But if we need later we can enable it with this, or create one ourself
+//    miniZKCluster = new QTestUtil.QTestSetup();
+//    miniZKCluster.preTest(hiveConf);
+
+    hiveConf.logVars(System.err);
+    System.err.flush();
+
+    miniHS2 = new MiniHS2.Builder().withConf(hiveConf).cleanupLocalDirOnStartup(false).build();
+
+    miniHS2.start(new HashMap<String, String>());
   }
 
   @Override
   @AfterClass
-  public void shutdown() {
-    try {
-      if (hiveServer2 != null) {
-        System.err.println("Stopping HiveServer2...");
-        hiveServer2.stop();
-      }
-    } catch (Throwable t) {
-      t.printStackTrace();
-    }
-
-    if (miniZKCluster != null) {
-      try {
-        miniZKCluster.tearDown();
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
+  public void shutdown() throws Exception {
+    if (miniHS2 != null) {
+      miniHS2.stop();
     }
+//    if (miniZKCluster != null) {
+//      miniZKCluster.tearDown();
+//    }
   }
 
   public void runTest(String qFileName) throws Exception {
-    QFileClient qClient = new QFileClient(new HiveConf(), hiveRootDirectory,
-        queryDirectory, logDirectory, resultsDirectory)
+    QFileClient qClient = new QFileClient(miniHS2.getHiveConf(), hiveRootDirectory,
+        queryDirectory, logDirectory, resultsDirectory, initScript, cleanupScript)
     .setQFileName(qFileName)
     .setUsername("user")
     .setPassword("password")
-    .setJdbcUrl("jdbc:hive2://localhost:10000")
+    .setJdbcUrl(miniHS2.getJdbcURL())
    .setJdbcDriver("org.apache.hive.jdbc.HiveDriver")
    .setTestDataDirectory(hiveRootDirectory + "/data/files")
    .setTestScriptDirectory(hiveRootDirectory + "/data/scripts");
@@ -150,22 +133,14 @@ public void runTest(String qFileName) throws Exception {
 
   @Override
   public void setUp() {
-    // TODO Auto-generated method stub
-
   }
 
   @Override
   public void tearDown() {
-    // TODO Auto-generated method stub
-
   }
 
   @Override
   public void runTest(String name, String name2, String absolutePath) throws Exception {
     runTest(name2);
   }
-
 }
-
-
-*/
\ No newline at end of file
diff --git itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
similarity index 100%
rename from itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
rename to itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
diff --git itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
similarity index 100%
rename from itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
rename to itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
diff --git ql/src/test/results/clientpositive/beeline/escape_comments.q.out ql/src/test/results/clientpositive/beeline/escape_comments.q.out
new file mode 100644
index 0000000..0cbc8d6
--- /dev/null
+++ ql/src/test/results/clientpositive/beeline/escape_comments.q.out
@@ -0,0 +1,416 @@
+>>> !run !!{qFileDirectory}!!/escape_comments.q
+>>> create database escape_comments_db comment 'a\nb';
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb'
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:null, properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): create database escape_comments_db comment 'a\nb'
+PREHOOK: query: create database escape_comments_db comment 'a\nb'
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:escape_comments_db
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: create database escape_comments_db comment 'a\nb'
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:escape_comments_db
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query create database escape_comments_db comment 'a\nb'
+No rows affected
+>>> use escape_comments_db;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): use escape_comments_db
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:null, properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): use escape_comments_db
+PREHOOK: query: use escape_comments_db
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:escape_comments_db
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: use escape_comments_db
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:escape_comments_db
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query use escape_comments_db
+No rows affected
+>>> create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb');
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:null, properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+PREHOOK: query: create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query create table escape_comments_tbl1
+(col1 string comment 'a\nb\'\;') comment 'a\nb'
+partitioned by (p1 string comment 'a\nb')
+No rows affected
+>>> create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col1, type:string, comment:null)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1
+PREHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_view1
+Starting task [Stage-1:DDL] in serial mode
+POSTHOOK: query: create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_view1
+POSTHOOK: Lineage: escape_comments_view1.col1 SIMPLE [(escape_comments_tbl1)escape_comments_tbl1.FieldSchema(name:col1, type:string, comment:a
+b';), ]
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query create view escape_comments_view1 (col1 comment 'a\nb') comment 'a\nb'
+as select col1 from escape_comments_tbl1
+No rows affected
+>>> create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb';
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:null, properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+PREHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+PREHOOK: type: CREATEINDEX
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+POSTHOOK: type: CREATEINDEX
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query create index index2 on table escape_comments_tbl1(col1) as 'COMPACT' with deferred rebuild comment 'a\nb'
+No rows affected
+>>> 
+>>> describe database extended escape_comments_db;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe database extended escape_comments_db
+PREHOOK: query: describe database extended escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe database extended escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe database extended escape_comments_db
+'db_name','comment','location','owner_name','owner_type','parameters'
+'escape_comments_db','a\nb','location/in/test','user','USER',''
+1 row selected
+>>> describe database escape_comments_db;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe database escape_comments_db
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:db_name, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer), FieldSchema(name:location, type:string, comment:from deserializer), FieldSchema(name:owner_name, type:string, comment:from deserializer), FieldSchema(name:owner_type, type:string, comment:from deserializer), FieldSchema(name:parameters, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe database escape_comments_db
+PREHOOK: query: describe database escape_comments_db
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:escape_comments_db
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe database escape_comments_db
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:escape_comments_db
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe database escape_comments_db
+'db_name','comment','location','owner_name','owner_type','parameters'
+'escape_comments_db','a\nb','location/in/test','user','USER',''
+1 row selected
+>>> show create table escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_tbl1
+PREHOOK: query: show create table escape_comments_tbl1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: show create table escape_comments_tbl1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query show create table escape_comments_tbl1
+'createtab_stmt'
+'CREATE TABLE `escape_comments_tbl1`('
+'  `col1` string COMMENT 'a\nb\'\;')'
+'COMMENT 'a\nb''
+'PARTITIONED BY ( '
+'  `p1` string COMMENT 'a\nb')'
+'ROW FORMAT SERDE '
+'  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' '
+'STORED AS INPUTFORMAT '
+'  'org.apache.hadoop.mapred.TextInputFormat' '
+'OUTPUTFORMAT '
+'  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat''
+'LOCATION'
+'  '!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1''
+'TBLPROPERTIES ('
+'  'transient_lastDdlTime'='!!UNIXTIME!!')'
+15 rows selected
+>>> describe formatted escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_tbl1
+PREHOOK: query: describe formatted escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe formatted escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe formatted escape_comments_tbl1
+'col_name','data_type','comment'
+'# col_name ','data_type ','comment '
+'','NULL','NULL'
+'col1','string','a\nb';'
+'','NULL','NULL'
+'# Partition Information','NULL','NULL'
+'# col_name ','data_type ','comment '
+'','NULL','NULL'
+'p1','string','a\nb'
+'','NULL','NULL'
+'# Detailed Table Information','NULL','NULL'
+'Database: ','escape_comments_db ','NULL'
+'Owner: ','user ','NULL'
+'CreateTime: ','!!TIMESTAMP!!','NULL'
+'LastAccessTime: ','UNKNOWN ','NULL'
+'Retention: ','0 ','NULL'
+'Location: ','!!{hive.metastore.warehouse.dir}!!/escape_comments_db.db/escape_comments_tbl1','NULL'
+'Table Type: ','MANAGED_TABLE ','NULL'
+'Table Parameters:','NULL','NULL'
+'','comment ','a\nb '
+'','transient_lastDdlTime','!!UNIXTIME!! '
+'','NULL','NULL'
+'# Storage Information','NULL','NULL'
+'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe','NULL'
+'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL'
+'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL'
+'Compressed: ','No ','NULL'
+'Num Buckets: ','-1 ','NULL'
+'Bucket Columns: ','[] ','NULL'
+'Sort Columns: ','[] ','NULL'
+'Storage Desc Params:','NULL','NULL'
+'','serialization.format','1 '
+31 rows selected
+>>> describe pretty escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe pretty escape_comments_tbl1
+PREHOOK: query: describe pretty escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe pretty escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe pretty escape_comments_tbl1
+'col_name','data_type','comment'
+'col_name ','data_type ','comment'
+'','',''
+'col1 ','string ','a'
+' ',' ','b';'
+'p1 ','string ','a'
+' ',' ','b'
+'','NULL','NULL'
+'# Partition Information','NULL','NULL'
+'col_name ','data_type ','comment'
+'','',''
+'p1 ','string ','a'
+' ',' ','b'
+12 rows selected
+>>> describe escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe escape_comments_tbl1
+PREHOOK: query: describe escape_comments_tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_tbl1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe escape_comments_tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_tbl1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe escape_comments_tbl1
+'col_name','data_type','comment'
+'col1','string','a\nb';'
+'p1','string','a\nb'
+'','NULL','NULL'
+'# Partition Information','NULL','NULL'
+'# col_name ','data_type ','comment '
+'','NULL','NULL'
+'p1','string','a\nb'
+7 rows selected
+>>> show create table escape_comments_view1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): show create table escape_comments_view1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:createtab_stmt, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): show create table escape_comments_view1
+PREHOOK: query: show create table escape_comments_view1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: show create table escape_comments_view1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query show create table escape_comments_view1
+'createtab_stmt'
+'CREATE VIEW `escape_comments_view1` AS SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`'
+1 row selected
+>>> describe formatted escape_comments_view1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:col_name, type:string, comment:from deserializer), FieldSchema(name:data_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): describe formatted escape_comments_view1
+PREHOOK: query: describe formatted escape_comments_view1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: escape_comments_db@escape_comments_view1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: describe formatted escape_comments_view1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: escape_comments_db@escape_comments_view1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query describe formatted escape_comments_view1
+'col_name','data_type','comment'
+'# col_name ','data_type ','comment '
+'','NULL','NULL'
+'col1','string','a\nb'
+'','NULL','NULL'
+'# Detailed Table Information','NULL','NULL'
+'Database: ','escape_comments_db ','NULL'
+'Owner: ','user ','NULL'
+'CreateTime: ','!!TIMESTAMP!!','NULL'
+'LastAccessTime: ','UNKNOWN ','NULL'
+'Retention: ','0 ','NULL'
+'Table Type: ','VIRTUAL_VIEW ','NULL'
+'Table Parameters:','NULL','NULL'
+'','comment ','a\nb '
+'','transient_lastDdlTime','!!UNIXTIME!! '
+'','NULL','NULL'
+'# Storage Information','NULL','NULL'
+'SerDe Library: ','null ','NULL'
+'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat','NULL'
+'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat','NULL'
+'Compressed: ','No ','NULL'
+'Num Buckets: ','-1 ','NULL'
+'Bucket Columns: ','[] ','NULL'
+'Sort Columns: ','[] ','NULL'
+'','NULL','NULL'
+'# View Information','NULL','NULL'
+'View Original Text: ','select col1 from escape_comments_tbl1','NULL'
+'View Expanded Text: ','SELECT `col1` AS `col1` FROM (select `escape_comments_tbl1`.`col1` from `escape_comments_db`.`escape_comments_tbl1`) `escape_comments_db.escape_comments_view1`','NULL'
+'View Rewrite Enabled:','No ','NULL'
+28 rows selected
+>>> show formatted index on escape_comments_tbl1;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:idx_name, type:string, comment:from deserializer), FieldSchema(name:tab_name, type:string, comment:from deserializer), FieldSchema(name:col_names, type:string, comment:from deserializer), FieldSchema(name:idx_tab_name, type:string, comment:from deserializer), FieldSchema(name:idx_type, type:string, comment:from deserializer), FieldSchema(name:comment, type:string, comment:from deserializer)], properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): show formatted index on escape_comments_tbl1
+PREHOOK: query: show formatted index on escape_comments_tbl1
+PREHOOK: type: SHOWINDEXES
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: show formatted index on escape_comments_tbl1
+POSTHOOK: type: SHOWINDEXES
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query show formatted index on escape_comments_tbl1
+'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment'
+'idx_name ','tab_name ','col_names ','idx_tab_name ','idx_type ','comment '
+'','NULL','NULL','NULL','NULL','NULL'
+'','NULL','NULL','NULL','NULL','NULL'
+'index2 ','escape_comments_tbl1','col1 ','escape_comments_db__escape_comments_tbl1_index2__','compact ','a\nb '
+4 rows selected
+>>> 
+>>> drop database escape_comments_db cascade;
+Acquired the compile lock.
+Compiling commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade
+Semantic Analysis Completed
+Returning Hive schema: Schema(fieldSchemas:null, properties:null)
+Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+Executing commandqueryId=(!!{queryId}!!): drop database escape_comments_db cascade
+PREHOOK: query: drop database escape_comments_db cascade
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:escape_comments_db
+PREHOOK: Output: database:escape_comments_db
+PREHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+PREHOOK: Output: escape_comments_db@escape_comments_tbl1
+PREHOOK: Output: escape_comments_db@escape_comments_view1
+Starting task [Stage-0:DDL] in serial mode
+POSTHOOK: query: drop database escape_comments_db cascade
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:escape_comments_db
+POSTHOOK: Output: database:escape_comments_db
+POSTHOOK: Output: escape_comments_db@escape_comments_db__escape_comments_tbl1_index2__
+POSTHOOK: Output: escape_comments_db@escape_comments_tbl1
+POSTHOOK: Output: escape_comments_db@escape_comments_view1
+Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
+OK
+Shutting down query drop database escape_comments_db cascade
+No rows affected
+>>> !record
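
Note (illustration, not part of the patch): the expected file above only stays stable across runs because QFileClient's RegexFilterSet rewrites the volatile parts of the BeeLine output before executeDiff() compares it against the .q.out file. The sketch below applies two of the filters added in this patch to a raw HiveServer2 status line. The class name FilterSketch and the sample input line are invented for illustration; the two pattern/replacement pairs are copied from the QFileClient hunk at the top of this patch.

import java.util.regex.Pattern;

// Minimal sketch: QFileClient.RegexFilterSet chains java.util.regex
// replacements like these over the whole BeeLine output before the diff.
public class FilterSketch {
  public static void main(String[] args) {
    String line = "Completed compiling command(queryId=hive_20170307121212_ab12cd34); "
        + "Time taken: 0.123 seconds";

    // Filter from the patch: mask the per-run query id.
    // "command(queryId=...)" becomes "commandqueryId=(!!{queryId}!!)",
    // the exact shape seen throughout escape_comments.q.out above.
    line = Pattern.compile("\\(queryId=[^\\)]*\\)").matcher(line)
        .replaceAll("queryId=(!!{queryId}!!)");

    // Filter from the patch: elide the measured duration, so timing
    // differences between runs never fail the comparison.
    line = Pattern.compile("Time taken: [0-9\\.]* seconds").matcher(line)
        .replaceAll("Time taken: !!ELIDED!! seconds");

    // Prints:
    // Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
    System.out.println(line);
  }
}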